35 #include "llvm/ADT/IndexedMap.h"
36 #include "llvm/ADT/PointerEmbeddedInt.h"
37 #include "llvm/ADT/STLExtras.h"
38 #include "llvm/ADT/SmallSet.h"
39 #include "llvm/ADT/StringExtras.h"
40 #include "llvm/Frontend/OpenMP/OMPAssume.h"
41 #include "llvm/Frontend/OpenMP/OMPConstants.h"
45 using namespace clang;
46 using namespace llvm::omp;
59 enum DefaultDataSharingAttributes {
64 DSA_firstprivate = 1 << 3,
74 unsigned Modifier = 0;
75 const Expr *RefExpr =
nullptr;
78 bool AppliedToPointee =
false;
79 DSAVarData() =
default;
83 bool AppliedToPointee)
84 : DKind(DKind), CKind(CKind), Modifier(Modifier), RefExpr(RefExpr),
85 PrivateCopy(PrivateCopy), ImplicitDSALoc(ImplicitDSALoc),
86 AppliedToPointee(AppliedToPointee) {}
88 using OperatorOffsetTy =
// Map from an OpenMP 'depend' clause to its recorded operator/offset data
// (OperatorOffsetTy -- its definition is only partially visible in this
// extract); presumably used for doacross dependence tracking. TODO confirm.
90 using DoacrossDependMapTy =
91 llvm::DenseMap<OMPDependClause *, OperatorOffsetTy>;
93 enum class UsesAllocatorsDeclKind {
105 unsigned Modifier = 0;
108 llvm::PointerIntPair<const Expr *, 1, bool> RefExpr;
112 bool AppliedToPointee =
false;
// Per-declaration data-sharing-attribute info (DSAInfo) keyed by ValueDecl.
114 using DeclSAMapTy = llvm::SmallDenseMap<const ValueDecl *, DSAInfo, 8>;
// Declaration -> referencing expression; used below for the aligned and
// nontemporal maps of SharingMapTy.
115 using UsedRefMapTy = llvm::SmallDenseMap<const ValueDecl *, const Expr *, 8>;
// Loop-control-variable info: (1-based position, captured VarDecl) -- see the
// LCVMap.size() + 1 insertion and the {0, nullptr} "not found" sentinel used
// by the isLoopControlVariable helpers later in this file.
116 using LCDeclInfo = std::pair<unsigned, VarDecl *>;
// Map of loop control variables for the current directive.
117 using LoopControlVariablesMapTy =
118 llvm::SmallDenseMap<const ValueDecl *, LCDeclInfo, 8>;
121 struct MappedExprComponentTy {
// Declaration -> mappable-expression component lists recorded for it
// (MappedExprComponentTy holds the component lists plus the clause kind).
125 using MappedExprComponentsTy =
126 llvm::DenseMap<const ValueDecl *, MappedExprComponentTy>;
// Critical-directive name -> (directive, hint value); keyed by the name's
// string form (see the Criticals.find(Name.getAsString()) lookup below).
127 using CriticalsWithHintsTy =
128 llvm::StringMap<std::pair<const OMPCriticalDirective *, llvm::APSInt>>;
129 struct ReductionData {
// BinaryOperatorKind packed into 16 pointer bits so it can share a
// PointerUnion slot with 'const Expr *' in ReductionOp (see line 132).
130 using BOKPtrType = llvm::PointerEmbeddedInt<BinaryOperatorKind, 16>;
132 llvm::PointerUnion<const Expr *, BOKPtrType> ReductionOp;
133 ReductionData() =
default;
140 ReductionOp = RefExpr;
143 using DeclReductionMapTy =
144 llvm::SmallDenseMap<const ValueDecl *, ReductionData, 4>;
145 struct DefaultmapInfo {
149 DefaultmapInfo() =
default;
151 : ImplicitBehavior(M), SLoc(Loc) {}
154 struct SharingMapTy {
155 DeclSAMapTy SharingMap;
156 DeclReductionMapTy ReductionMap;
157 UsedRefMapTy AlignedMap;
158 UsedRefMapTy NontemporalMap;
159 MappedExprComponentsTy MappedExprComponents;
160 LoopControlVariablesMapTy LCVMap;
161 DefaultDataSharingAttributes DefaultAttr = DSA_unspecified;
166 Scope *CurScope =
nullptr;
172 DoacrossDependMapTy DoacrossDepends;
176 std::optional<std::pair<const Expr *, OMPOrderedClause *>> OrderedRegion;
177 bool RegionHasOrderConcurrent =
false;
178 unsigned AssociatedLoops = 1;
179 bool HasMutipleLoops =
false;
180 const Decl *PossiblyLoopCounter =
nullptr;
181 bool NowaitRegion =
false;
182 bool UntiedRegion =
false;
183 bool CancelRegion =
false;
184 bool LoopStart =
false;
185 bool BodyComplete =
false;
190 Expr *TaskgroupReductionRef =
nullptr;
199 llvm::DenseMap<CanonicalDeclPtr<const Decl>, UsesAllocatorsDeclKind>
203 struct ImplicitDefaultFDInfoTy {
207 size_t StackLevel = 0;
210 ImplicitDefaultFDInfoTy(
const FieldDecl *FD,
size_t StackLevel,
212 : FD(FD), StackLevel(StackLevel), VD(VD) {}
216 ImplicitDefaultFirstprivateFDs;
217 Expr *DeclareMapperVar =
nullptr;
221 :
Directive(DKind), DirectiveName(Name), CurScope(CurScope),
223 SharingMapTy() =
default;
229 DeclSAMapTy Threadprivates;
236 bool ForceCapturing =
false;
239 bool ForceCaptureByReferenceInTargetExecutable =
false;
240 CriticalsWithHintsTy Criticals;
241 unsigned IgnoredStackElements = 0;
// Reverse iteration over the directive stack: begin() uses rbegin() (see
// below), so traversal runs from the innermost directive outward.
245 using const_iterator = StackTy::const_reverse_iterator;
246 const_iterator begin()
const {
247 return Stack.empty() ? const_iterator()
248 : Stack.back().first.rbegin() + IgnoredStackElements;
250 const_iterator end()
const {
251 return Stack.empty() ? const_iterator() : Stack.back().first.rend();
// Mutable counterpart of const_iterator; same innermost-to-outermost order.
253 using iterator = StackTy::reverse_iterator;
255 return Stack.empty() ? iterator()
256 : Stack.back().first.rbegin() + IgnoredStackElements;
259 return Stack.empty() ? iterator() : Stack.back().first.rend();
264 bool isStackEmpty()
const {
265 return Stack.empty() ||
266 Stack.back().second != CurrentNonCapturingFunctionScope ||
267 Stack.back().first.size() <= IgnoredStackElements;
269 size_t getStackSize()
const {
270 return isStackEmpty() ? 0
271 : Stack.back().first.size() - IgnoredStackElements;
274 SharingMapTy *getTopOfStackOrNull() {
275 size_t Size = getStackSize();
278 return &Stack.back().first[
Size - 1];
280 const SharingMapTy *getTopOfStackOrNull()
const {
281 return const_cast<DSAStackTy &
>(*this).getTopOfStackOrNull();
283 SharingMapTy &getTopOfStack() {
284 assert(!isStackEmpty() &&
"no current directive");
285 return *getTopOfStackOrNull();
287 const SharingMapTy &getTopOfStack()
const {
288 return const_cast<DSAStackTy &
>(*this).getTopOfStack();
291 SharingMapTy *getSecondOnStackOrNull() {
292 size_t Size = getStackSize();
295 return &Stack.back().first[
Size - 2];
297 const SharingMapTy *getSecondOnStackOrNull()
const {
298 return const_cast<DSAStackTy &
>(*this).getSecondOnStackOrNull();
307 SharingMapTy &getStackElemAtLevel(
unsigned Level) {
308 assert(
Level < getStackSize() &&
"no such stack element");
309 return Stack.back().first[
Level];
311 const SharingMapTy &getStackElemAtLevel(
unsigned Level)
const {
312 return const_cast<DSAStackTy &
>(*this).getStackElemAtLevel(
Level);
315 DSAVarData getDSA(const_iterator &Iter,
ValueDecl *D)
const;
318 bool isOpenMPLocal(
VarDecl *D, const_iterator Iter)
const;
331 Expr *OMPPredefinedAllocators[OMPAllocateDeclAttr::OMPUserDefinedMemAlloc] = {
// Constructor: stores the Sema reference used for diagnostics and AST
// queries throughout the stack's methods.
340 explicit DSAStackTy(
Sema &S) : SemaRef(S) {}
// Caches the type stored in OMPAllocatorHandleT (presumably the OpenMP
// 'omp_allocator_handle_t' type, judging by the name -- TODO confirm).
343 void setOMPAllocatorHandleT(
QualType Ty) { OMPAllocatorHandleT = Ty; }
// Returns the cached allocator-handle type; null QualType if never set.
345 QualType getOMPAllocatorHandleT()
const {
return OMPAllocatorHandleT; }
// Caches the type stored in OMPAlloctraitT (presumably 'omp_alloctrait_t').
347 void setOMPAlloctraitT(
QualType Ty) { OMPAlloctraitT = Ty; }
// Returns the cached alloctrait type; null QualType if never set.
349 QualType getOMPAlloctraitT()
const {
return OMPAlloctraitT; }
351 void setAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
353 OMPPredefinedAllocators[AllocatorKind] = Allocator;
356 Expr *getAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind)
const {
357 return OMPPredefinedAllocators[AllocatorKind];
// Caches the type stored in OMPDependT (presumably the OpenMP depend type).
360 void setOMPDependT(
QualType Ty) { OMPDependT = Ty; }
// Returns the cached depend type; null QualType if never set.
362 QualType getOMPDependT()
const {
return OMPDependT; }
// Caches the type stored in OMPEventHandleT (presumably 'omp_event_handle_t').
365 void setOMPEventHandleT(
QualType Ty) { OMPEventHandleT = Ty; }
// Returns the cached event-handle type; null QualType if never set.
367 QualType getOMPEventHandleT()
const {
return OMPEventHandleT; }
// True while a clause is being parsed, i.e. ClauseKindMode has been set to
// something other than OMPC_unknown.
369 bool isClauseParsingMode()
const {
return ClauseKindMode != OMPC_unknown; }
371 assert(isClauseParsingMode() &&
"Must be in clause parsing mode.");
372 return ClauseKindMode;
376 bool isBodyComplete()
const {
377 const SharingMapTy *Top = getTopOfStackOrNull();
378 return Top && Top->BodyComplete;
// Marks the innermost directive's body as fully processed (asserts via
// getTopOfStack() that a directive is present).
380 void setBodyComplete() { getTopOfStack().BodyComplete =
true; }
// Whether variable capturing is currently being forced (ForceCapturing flag).
382 bool isForceVarCapturing()
const {
return ForceCapturing; }
// Toggles the forced-capturing mode.
383 void setForceVarCapturing(
bool V) { ForceCapturing =
V; }
385 void setForceCaptureByReferenceInTargetExecutable(
bool V) {
386 ForceCaptureByReferenceInTargetExecutable =
V;
388 bool isForceCaptureByReferenceInTargetExecutable()
const {
389 return ForceCaptureByReferenceInTargetExecutable;
394 assert(!IgnoredStackElements &&
395 "cannot change stack while ignoring elements");
397 Stack.back().second != CurrentNonCapturingFunctionScope)
398 Stack.emplace_back(StackTy(), CurrentNonCapturingFunctionScope);
399 Stack.back().first.emplace_back(DKind, DirName, CurScope, Loc);
400 Stack.back().first.back().DefaultAttrLoc = Loc;
404 assert(!IgnoredStackElements &&
405 "cannot change stack while ignoring elements");
406 assert(!Stack.back().first.empty() &&
407 "Data-sharing attributes stack is empty!");
408 Stack.back().first.pop_back();
413 class ParentDirectiveScope {
418 ParentDirectiveScope(DSAStackTy &Self,
bool Activate)
419 : Self(Self), Active(
false) {
// Scope destructor: delegates cleanup to disable() (its body is only
// partially visible here; it appears to adjust Self.IgnoredStackElements).
423 ~ParentDirectiveScope() { disable(); }
426 --Self.IgnoredStackElements;
432 ++Self.IgnoredStackElements;
441 "Expected loop-based directive.");
442 getTopOfStack().LoopStart =
true;
447 "Expected loop-based directive.");
448 getTopOfStack().LoopStart =
false;
451 bool isLoopStarted()
const {
453 "Expected loop-based directive.");
454 return !getTopOfStack().LoopStart;
457 void resetPossibleLoopCounter(
const Decl *D =
nullptr) {
458 getTopOfStack().PossiblyLoopCounter = D ? D->getCanonicalDecl() : D;
461 const Decl *getPossiblyLoopCunter()
const {
462 return getTopOfStack().PossiblyLoopCounter;
465 void pushFunction() {
466 assert(!IgnoredStackElements &&
467 "cannot change stack while ignoring elements");
469 assert(!isa<CapturingScopeInfo>(CurFnScope));
470 CurrentNonCapturingFunctionScope = CurFnScope;
474 assert(!IgnoredStackElements &&
475 "cannot change stack while ignoring elements");
476 if (!Stack.empty() && Stack.back().second == OldFSI) {
477 assert(Stack.back().first.empty());
480 CurrentNonCapturingFunctionScope =
nullptr;
482 if (!isa<CapturingScopeInfo>(FSI)) {
483 CurrentNonCapturingFunctionScope = FSI;
492 const std::pair<const OMPCriticalDirective *, llvm::APSInt>
494 auto I = Criticals.find(Name.getAsString());
495 if (I != Criticals.end())
514 const LCDeclInfo isLoopControlVariable(
const ValueDecl *D)
const;
519 const LCDeclInfo isParentLoopControlVariable(
const ValueDecl *D)
const;
524 const LCDeclInfo isLoopControlVariable(
const ValueDecl *D,
525 unsigned Level)
const;
528 const ValueDecl *getParentLoopControlVariable(
unsigned I)
const;
531 void markDeclAsUsedInScanDirective(
ValueDecl *D) {
532 if (SharingMapTy *Stack = getSecondOnStackOrNull())
533 Stack->UsedInScanDirective.insert(D);
537 bool isUsedInScanDirective(
ValueDecl *D)
const {
538 if (
const SharingMapTy *Stack = getTopOfStackOrNull())
539 return Stack->UsedInScanDirective.contains(D);
545 DeclRefExpr *PrivateCopy =
nullptr,
unsigned Modifier = 0,
546 bool AppliedToPointee =
false);
555 const Expr *ReductionRef);
561 Expr *&TaskgroupDescriptor)
const;
566 const Expr *&ReductionRef,
567 Expr *&TaskgroupDescriptor)
const;
570 Expr *getTaskgroupReductionRef()
const {
571 assert((getTopOfStack().
Directive == OMPD_taskgroup ||
575 "taskgroup reference expression requested for non taskgroup or "
576 "parallel/worksharing directive.");
577 return getTopOfStack().TaskgroupReductionRef;
581 bool isTaskgroupReductionRef(
const ValueDecl *VD,
unsigned Level)
const {
582 return getStackElemAtLevel(
Level).TaskgroupReductionRef &&
583 cast<DeclRefExpr>(getStackElemAtLevel(
Level).TaskgroupReductionRef)
589 const DSAVarData getTopDSA(
ValueDecl *D,
bool FromParent);
591 const DSAVarData getImplicitDSA(
ValueDecl *D,
bool FromParent)
const;
593 const DSAVarData getImplicitDSA(
ValueDecl *D,
unsigned Level)
const;
600 DefaultDataSharingAttributes)>
603 bool FromParent)
const;
611 bool FromParent)
const;
618 unsigned Level,
bool NotLastprivate =
false)
const;
622 bool hasExplicitDirective(
624 unsigned Level)
const;
628 const llvm::function_ref<
bool(
631 bool FromParent)
const;
635 const SharingMapTy *Top = getTopOfStackOrNull();
636 return Top ? Top->Directive : OMPD_unknown;
640 assert(!isStackEmpty() &&
"No directive at specified level.");
641 return getStackElemAtLevel(
Level).Directive;
645 unsigned OpenMPCaptureLevel)
const {
648 return CaptureRegions[OpenMPCaptureLevel];
652 const SharingMapTy *
Parent = getSecondOnStackOrNull();
// Records an 'omp requires' declaration by appending it to RequiresDecls.
657 void addRequiresDecl(
OMPRequiresDecl *RD) { RequiresDecls.push_back(RD); }
660 template <
typename ClauseType>
bool hasRequiresDeclWithClause()
const {
663 return isa<ClauseType>(C);
671 bool IsDuplicate =
false;
674 for (
const OMPClause *CPrev : D->clauselists()) {
675 if (CNew->getClauseKind() == CPrev->getClauseKind()) {
676 SemaRef.
Diag(CNew->getBeginLoc(),
677 diag::err_omp_requires_clause_redeclaration)
678 << getOpenMPClauseName(CNew->getClauseKind());
679 SemaRef.
Diag(CPrev->getBeginLoc(),
680 diag::note_omp_requires_previous_clause)
681 << getOpenMPClauseName(CPrev->getClauseKind());
692 TargetLocations.push_back(LocStart);
698 AtomicLocation = Loc;
// Returns the recorded atomic-directive location (AtomicLocation, set a few
// lines above); invalid SourceLocation if none was recorded.
703 SourceLocation getAtomicDirectiveLoc()
const {
return AtomicLocation; }
707 return TargetLocations;
712 getTopOfStack().DefaultAttr = DSA_none;
713 getTopOfStack().DefaultAttrLoc = Loc;
717 getTopOfStack().DefaultAttr = DSA_shared;
718 getTopOfStack().DefaultAttrLoc = Loc;
722 getTopOfStack().DefaultAttr = DSA_private;
723 getTopOfStack().DefaultAttrLoc = Loc;
727 getTopOfStack().DefaultAttr = DSA_firstprivate;
728 getTopOfStack().DefaultAttrLoc = Loc;
733 DefaultmapInfo &DMI = getTopOfStack().DefaultmapMap[
Kind];
734 DMI.ImplicitBehavior = M;
740 return getTopOfStack()
741 .DefaultmapMap[OMPC_DEFAULTMAP_aggregate]
744 .DefaultmapMap[OMPC_DEFAULTMAP_scalar]
747 .DefaultmapMap[OMPC_DEFAULTMAP_pointer]
749 return getTopOfStack().DefaultmapMap[VariableCategory].ImplicitBehavior !=
754 return ConstructTraits;
759 ConstructTraits.append(Traits.begin(), Traits.end());
761 for (llvm::omp::TraitProperty Trait : llvm::reverse(Traits)) {
762 llvm::omp::TraitProperty Top = ConstructTraits.pop_back_val();
763 assert(Top == Trait &&
"Something left a trait on the stack!");
769 DefaultDataSharingAttributes getDefaultDSA(
unsigned Level)
const {
770 return getStackSize() <=
Level ? DSA_unspecified
771 : getStackElemAtLevel(
Level).DefaultAttr;
773 DefaultDataSharingAttributes getDefaultDSA()
const {
774 return isStackEmpty() ? DSA_unspecified : getTopOfStack().DefaultAttr;
777 return isStackEmpty() ?
SourceLocation() : getTopOfStack().DefaultAttrLoc;
781 return isStackEmpty()
783 : getTopOfStack().DefaultmapMap[
Kind].ImplicitBehavior;
786 getDefaultmapModifierAtLevel(
unsigned Level,
788 return getStackElemAtLevel(
Level).DefaultmapMap[
Kind].ImplicitBehavior;
790 bool isDefaultmapCapturedByRef(
unsigned Level,
793 getDefaultmapModifierAtLevel(
Level,
Kind);
794 if (
Kind == OMPC_DEFAULTMAP_scalar ||
Kind == OMPC_DEFAULTMAP_pointer) {
795 return (M == OMPC_DEFAULTMAP_MODIFIER_alloc) ||
796 (M == OMPC_DEFAULTMAP_MODIFIER_to) ||
797 (M == OMPC_DEFAULTMAP_MODIFIER_from) ||
798 (M == OMPC_DEFAULTMAP_MODIFIER_tofrom);
805 case OMPC_DEFAULTMAP_scalar:
806 case OMPC_DEFAULTMAP_pointer:
808 (M == OMPC_DEFAULTMAP_MODIFIER_firstprivate) ||
809 (M == OMPC_DEFAULTMAP_MODIFIER_default);
810 case OMPC_DEFAULTMAP_aggregate:
811 return M == OMPC_DEFAULTMAP_MODIFIER_firstprivate;
815 llvm_unreachable(
"Unexpected OpenMPDefaultmapClauseKind enum");
817 bool mustBeFirstprivateAtLevel(
unsigned Level,
820 getDefaultmapModifierAtLevel(
Level,
Kind);
821 return mustBeFirstprivateBase(M,
Kind);
825 return mustBeFirstprivateBase(M,
Kind);
829 bool isThreadPrivate(
VarDecl *D) {
830 const DSAVarData DVar = getTopDSA(D,
false);
835 void setOrderedRegion(
bool IsOrdered,
const Expr *Param,
838 getTopOfStack().OrderedRegion.emplace(Param, Clause);
840 getTopOfStack().OrderedRegion.reset();
844 bool isOrderedRegion()
const {
845 if (
const SharingMapTy *Top = getTopOfStackOrNull())
846 return Top->OrderedRegion.has_value();
850 std::pair<const Expr *, OMPOrderedClause *> getOrderedRegionParam()
const {
851 if (
const SharingMapTy *Top = getTopOfStackOrNull())
852 if (Top->OrderedRegion)
853 return *Top->OrderedRegion;
854 return std::make_pair(
nullptr,
nullptr);
858 bool isParentOrderedRegion()
const {
859 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
860 return Parent->OrderedRegion.has_value();
864 std::pair<const Expr *, OMPOrderedClause *>
865 getParentOrderedRegionParam()
const {
866 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
867 if (
Parent->OrderedRegion)
868 return *
Parent->OrderedRegion;
869 return std::make_pair(
nullptr,
nullptr);
872 void setRegionHasOrderConcurrent(
bool HasOrderConcurrent) {
873 getTopOfStack().RegionHasOrderConcurrent = HasOrderConcurrent;
877 bool isParentOrderConcurrent()
const {
878 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
879 return Parent->RegionHasOrderConcurrent;
883 void setNowaitRegion(
bool IsNowait =
true) {
884 getTopOfStack().NowaitRegion = IsNowait;
888 bool isParentNowaitRegion()
const {
889 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
890 return Parent->NowaitRegion;
894 void setUntiedRegion(
bool IsUntied =
true) {
895 getTopOfStack().UntiedRegion = IsUntied;
898 bool isUntiedRegion()
const {
899 const SharingMapTy *Top = getTopOfStackOrNull();
900 return Top ? Top->UntiedRegion :
false;
903 void setParentCancelRegion(
bool Cancel =
true) {
904 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
905 Parent->CancelRegion |= Cancel;
908 bool isCancelRegion()
const {
909 const SharingMapTy *Top = getTopOfStackOrNull();
910 return Top ? Top->CancelRegion :
false;
915 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
916 Parent->PrevScanLocation = Loc;
919 bool doesParentHasScanDirective()
const {
920 const SharingMapTy *Top = getSecondOnStackOrNull();
921 return Top ? Top->PrevScanLocation.isValid() :
false;
925 const SharingMapTy *Top = getSecondOnStackOrNull();
930 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
931 Parent->PrevOrderedLocation = Loc;
934 bool doesParentHasOrderedDirective()
const {
935 const SharingMapTy *Top = getSecondOnStackOrNull();
936 return Top ? Top->PrevOrderedLocation.isValid() :
false;
940 const SharingMapTy *Top = getSecondOnStackOrNull();
945 void setAssociatedLoops(
unsigned Val) {
946 getTopOfStack().AssociatedLoops = Val;
948 getTopOfStack().HasMutipleLoops =
true;
951 unsigned getAssociatedLoops()
const {
952 const SharingMapTy *Top = getTopOfStackOrNull();
953 return Top ? Top->AssociatedLoops : 0;
956 bool hasMutipleLoops()
const {
957 const SharingMapTy *Top = getTopOfStackOrNull();
958 return Top ? Top->HasMutipleLoops :
false;
964 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
965 Parent->InnerTeamsRegionLoc = TeamsRegionLoc;
968 bool hasInnerTeamsRegion()
const {
969 return getInnerTeamsRegionLoc().
isValid();
973 const SharingMapTy *Top = getTopOfStackOrNull();
977 Scope *getCurScope()
const {
978 const SharingMapTy *Top = getTopOfStackOrNull();
979 return Top ? Top->CurScope :
nullptr;
// Stores the current DeclContext on the innermost directive's stack entry
// (asserts via getTopOfStack() that a directive is present).
981 void setContext(
DeclContext *DC) { getTopOfStack().Context = DC; }
983 const SharingMapTy *Top = getTopOfStackOrNull();
989 bool checkMappableExprComponentListsForDecl(
990 const ValueDecl *VD,
bool CurrentRegionOnly,
991 const llvm::function_ref<
1003 if (CurrentRegionOnly)
1008 for (; SI != SE; ++SI) {
1009 auto MI = SI->MappedExprComponents.find(VD);
1010 if (MI != SI->MappedExprComponents.end())
1012 MI->second.Components)
1013 if (Check(L, MI->second.Kind))
1021 bool checkMappableExprComponentListsForDeclAtLevel(
1023 const llvm::function_ref<
1027 if (getStackSize() <=
Level)
1030 const SharingMapTy &StackElem = getStackElemAtLevel(
Level);
1031 auto MI = StackElem.MappedExprComponents.find(VD);
1032 if (MI != StackElem.MappedExprComponents.end())
1034 MI->second.Components)
1035 if (Check(L, MI->second.Kind))
1042 void addMappableExpressionComponents(
1046 MappedExprComponentTy &MEC = getTopOfStack().MappedExprComponents[VD];
1048 MEC.Components.resize(MEC.Components.size() + 1);
1049 MEC.Components.back().append(Components.begin(), Components.end());
1050 MEC.Kind = WhereFoundClauseKind;
1053 unsigned getNestingLevel()
const {
1054 assert(!isStackEmpty());
1055 return getStackSize() - 1;
1058 const OperatorOffsetTy &OpsOffs) {
1059 SharingMapTy *
Parent = getSecondOnStackOrNull();
1061 Parent->DoacrossDepends.try_emplace(C, OpsOffs);
1063 llvm::iterator_range<DoacrossDependMapTy::const_iterator>
1064 getDoacrossDependClauses()
const {
1065 const SharingMapTy &StackElem = getTopOfStack();
1067 const DoacrossDependMapTy &Ref = StackElem.DoacrossDepends;
1068 return llvm::make_range(Ref.begin(), Ref.end());
1070 return llvm::make_range(StackElem.DoacrossDepends.end(),
1071 StackElem.DoacrossDepends.end());
1075 void addMappedClassesQualTypes(
QualType QT) {
1076 SharingMapTy &StackElem = getTopOfStack();
1077 StackElem.MappedClassesQualTypes.insert(QT);
1081 bool isClassPreviouslyMapped(
QualType QT)
const {
1082 const SharingMapTy &StackElem = getTopOfStack();
1083 return StackElem.MappedClassesQualTypes.contains(QT);
1087 void addToParentTargetRegionLinkGlobals(
DeclRefExpr *E) {
1088 assert(*OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(
1089 E->
getDecl()) == OMPDeclareTargetDeclAttr::MT_Link &&
1090 "Expected declare target link global.");
1091 for (
auto &Elem : *
this) {
1093 Elem.DeclareTargetLinkVarDecls.push_back(E);
1103 "Expected target executable directive.");
1104 return getTopOfStack().DeclareTargetLinkVarDecls;
1108 void addInnerAllocatorExpr(
Expr *E) {
1109 getTopOfStack().InnerUsedAllocators.push_back(E);
1113 return getTopOfStack().InnerUsedAllocators;
1117 void addImplicitTaskFirstprivate(
unsigned Level,
Decl *D) {
1118 getStackElemAtLevel(
Level).ImplicitTaskFirstprivates.insert(D);
1121 bool isImplicitTaskFirstprivate(
Decl *D)
const {
1122 return getTopOfStack().ImplicitTaskFirstprivates.contains(D);
1126 void addUsesAllocatorsDecl(
const Decl *D, UsesAllocatorsDeclKind
Kind) {
1127 getTopOfStack().UsesAllocatorsDecls.try_emplace(D,
Kind);
1131 std::optional<UsesAllocatorsDeclKind>
1132 isUsesAllocatorsDecl(
unsigned Level,
const Decl *D)
const {
1133 const SharingMapTy &StackElem = getTopOfStack();
1134 auto I = StackElem.UsesAllocatorsDecls.find(D);
1135 if (I == StackElem.UsesAllocatorsDecls.end())
1136 return std::nullopt;
1137 return I->getSecond();
1139 std::optional<UsesAllocatorsDeclKind>
1140 isUsesAllocatorsDecl(
const Decl *D)
const {
1141 const SharingMapTy &StackElem = getTopOfStack();
1142 auto I = StackElem.UsesAllocatorsDecls.find(D);
1143 if (I == StackElem.UsesAllocatorsDecls.end())
1144 return std::nullopt;
1145 return I->getSecond();
1148 void addDeclareMapperVarRef(
Expr *Ref) {
1149 SharingMapTy &StackElem = getTopOfStack();
1150 StackElem.DeclareMapperVar = Ref;
1152 const Expr *getDeclareMapperVarRef()
const {
1153 const SharingMapTy *Top = getTopOfStackOrNull();
1154 return Top ? Top->DeclareMapperVar :
nullptr;
1158 void addIteratorVarDecl(
VarDecl *VD) {
1159 SharingMapTy &StackElem = getTopOfStack();
1163 bool isIteratorVarDecl(
const VarDecl *VD)
const {
1164 const SharingMapTy *Top = getTopOfStackOrNull();
1168 return llvm::any_of(Top->IteratorVarDecls, [VD](
const VarDecl *IteratorVD) {
1169 return IteratorVD == VD->getCanonicalDecl();
1174 const_iterator I = begin();
1175 const_iterator EndI = end();
1176 size_t StackLevel = getStackSize();
1177 for (; I != EndI; ++I) {
1178 if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
1182 assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
1185 for (
const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
1186 if (IFD.FD == FD && IFD.StackLevel == StackLevel)
1191 bool isImplicitDefaultFirstprivateFD(
VarDecl *VD)
const {
1192 const_iterator I = begin();
1193 const_iterator EndI = end();
1194 for (; I != EndI; ++I)
1195 if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
1199 for (
const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
1206 iterator I = begin();
1207 const_iterator EndI = end();
1208 size_t StackLevel = getStackSize();
1209 for (; I != EndI; ++I) {
1210 if (I->DefaultAttr == DSA_private || I->DefaultAttr == DSA_firstprivate) {
1211 I->ImplicitDefaultFirstprivateFDs.emplace_back(FD, StackLevel, VD);
1216 assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
1226 DKind == OMPD_unknown;
1232 if (
const auto *FE = dyn_cast<FullExpr>(E))
1233 E = FE->getSubExpr();
1235 if (
const auto *MTE = dyn_cast<MaterializeTemporaryExpr>(E))
1236 E = MTE->getSubExpr();
1238 while (
const auto *Binder = dyn_cast<CXXBindTemporaryExpr>(E))
1239 E = Binder->getSubExpr();
1241 if (
const auto *ICE = dyn_cast<ImplicitCastExpr>(E))
1242 E = ICE->getSubExprAsWritten();
1251 if (
const auto *CED = dyn_cast<OMPCapturedExprDecl>(D))
1252 if (
const auto *ME = dyn_cast<MemberExpr>(
getExprAsWritten(CED->getInit())))
1253 D = ME->getMemberDecl();
1254 const auto *VD = dyn_cast<VarDecl>(D);
1255 const auto *FD = dyn_cast<FieldDecl>(D);
1256 if (VD !=
nullptr) {
1272 DSAStackTy::DSAVarData DSAStackTy::getDSA(const_iterator &Iter,
1275 auto *VD = dyn_cast<VarDecl>(D);
1276 const auto *FD = dyn_cast<FieldDecl>(D);
1278 if (Iter == end()) {
1285 DVar.CKind = OMPC_shared;
1292 DVar.CKind = OMPC_shared;
1296 DVar.CKind = OMPC_shared;
1307 DVar.CKind = OMPC_private;
1311 DVar.DKind = Iter->Directive;
1314 if (Iter->SharingMap.count(D)) {
1315 const DSAInfo &
Data = Iter->SharingMap.lookup(D);
1316 DVar.RefExpr =
Data.RefExpr.getPointer();
1317 DVar.PrivateCopy =
Data.PrivateCopy;
1318 DVar.CKind =
Data.Attributes;
1319 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1320 DVar.Modifier =
Data.Modifier;
1321 DVar.AppliedToPointee =
Data.AppliedToPointee;
1329 switch (Iter->DefaultAttr) {
1331 DVar.CKind = OMPC_shared;
1332 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1336 case DSA_firstprivate:
1339 DVar.CKind = OMPC_unknown;
1341 DVar.CKind = OMPC_firstprivate;
1343 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1351 DVar.CKind = OMPC_unknown;
1353 DVar.CKind = OMPC_private;
1355 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1357 case DSA_unspecified:
1362 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1366 DVar.CKind = OMPC_shared;
1376 DSAVarData DVarTemp;
1377 const_iterator I = Iter, E = end();
1385 DVarTemp = getDSA(I, D);
1386 if (DVarTemp.CKind != OMPC_shared) {
1387 DVar.RefExpr =
nullptr;
1388 DVar.CKind = OMPC_firstprivate;
1391 }
while (I != E && !isImplicitTaskingRegion(I->Directive));
1393 (DVarTemp.CKind == OMPC_unknown) ? OMPC_firstprivate : OMPC_shared;
1402 return getDSA(++Iter, D);
1405 const Expr *DSAStackTy::addUniqueAligned(
const ValueDecl *D,
1406 const Expr *NewDE) {
1407 assert(!isStackEmpty() &&
"Data sharing attributes stack is empty");
1409 SharingMapTy &StackElem = getTopOfStack();
1410 auto It = StackElem.AlignedMap.find(D);
1411 if (It == StackElem.AlignedMap.end()) {
1412 assert(NewDE &&
"Unexpected nullptr expr to be added into aligned map");
1413 StackElem.AlignedMap[D] = NewDE;
1416 assert(It->second &&
"Unexpected nullptr expr in the aligned map");
1420 const Expr *DSAStackTy::addUniqueNontemporal(
const ValueDecl *D,
1421 const Expr *NewDE) {
1422 assert(!isStackEmpty() &&
"Data sharing attributes stack is empty");
1424 SharingMapTy &StackElem = getTopOfStack();
1425 auto It = StackElem.NontemporalMap.find(D);
1426 if (It == StackElem.NontemporalMap.end()) {
1427 assert(NewDE &&
"Unexpected nullptr expr to be added into aligned map");
1428 StackElem.NontemporalMap[D] = NewDE;
1431 assert(It->second &&
"Unexpected nullptr expr in the aligned map");
1436 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1438 SharingMapTy &StackElem = getTopOfStack();
1439 StackElem.LCVMap.try_emplace(
1440 D, LCDeclInfo(StackElem.LCVMap.size() + 1,
Capture));
1443 const DSAStackTy::LCDeclInfo
1444 DSAStackTy::isLoopControlVariable(
const ValueDecl *D)
const {
1445 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1447 const SharingMapTy &StackElem = getTopOfStack();
1448 auto It = StackElem.LCVMap.find(D);
1449 if (It != StackElem.LCVMap.end())
1451 return {0,
nullptr};
1454 const DSAStackTy::LCDeclInfo
1455 DSAStackTy::isLoopControlVariable(
const ValueDecl *D,
unsigned Level)
const {
1456 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1458 for (
unsigned I =
Level + 1; I > 0; --I) {
1459 const SharingMapTy &StackElem = getStackElemAtLevel(I - 1);
1460 auto It = StackElem.LCVMap.find(D);
1461 if (It != StackElem.LCVMap.end())
1464 return {0,
nullptr};
1467 const DSAStackTy::LCDeclInfo
1468 DSAStackTy::isParentLoopControlVariable(
const ValueDecl *D)
const {
1469 const SharingMapTy *
Parent = getSecondOnStackOrNull();
1470 assert(
Parent &&
"Data-sharing attributes stack is empty");
1472 auto It =
Parent->LCVMap.find(D);
1473 if (It !=
Parent->LCVMap.end())
1475 return {0,
nullptr};
1478 const ValueDecl *DSAStackTy::getParentLoopControlVariable(
unsigned I)
const {
1479 const SharingMapTy *
Parent = getSecondOnStackOrNull();
1480 assert(
Parent &&
"Data-sharing attributes stack is empty");
1481 if (
Parent->LCVMap.size() < I)
1483 for (
const auto &Pair :
Parent->LCVMap)
1484 if (Pair.second.first == I)
1491 bool AppliedToPointee) {
1493 if (A == OMPC_threadprivate) {
1494 DSAInfo &
Data = Threadprivates[D];
1495 Data.Attributes = A;
1496 Data.RefExpr.setPointer(E);
1497 Data.PrivateCopy =
nullptr;
1498 Data.Modifier = Modifier;
1500 DSAInfo &
Data = getTopOfStack().SharingMap[D];
1501 assert(
Data.Attributes == OMPC_unknown || (A ==
Data.Attributes) ||
1502 (A == OMPC_firstprivate &&
Data.Attributes == OMPC_lastprivate) ||
1503 (A == OMPC_lastprivate &&
Data.Attributes == OMPC_firstprivate) ||
1504 (isLoopControlVariable(D).first && A == OMPC_private));
1505 Data.Modifier = Modifier;
1506 if (A == OMPC_lastprivate &&
Data.Attributes == OMPC_firstprivate) {
1507 Data.RefExpr.setInt(
true);
1510 const bool IsLastprivate =
1511 A == OMPC_lastprivate ||
Data.Attributes == OMPC_lastprivate;
1512 Data.Attributes = A;
1513 Data.RefExpr.setPointerAndInt(E, IsLastprivate);
1514 Data.PrivateCopy = PrivateCopy;
1515 Data.AppliedToPointee = AppliedToPointee;
1517 DSAInfo &
Data = getTopOfStack().SharingMap[PrivateCopy->
getDecl()];
1518 Data.Modifier = Modifier;
1519 Data.Attributes = A;
1520 Data.RefExpr.setPointerAndInt(PrivateCopy, IsLastprivate);
1521 Data.PrivateCopy =
nullptr;
1522 Data.AppliedToPointee = AppliedToPointee;
1529 StringRef Name,
const AttrVec *Attrs =
nullptr,
1544 OMPReferencedVarAttr::CreateImplicit(SemaRef.
Context, OrigRef));
1551 bool RefersToCapture =
false) {
1562 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1564 getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
1565 "Additional reduction info may be specified only for reduction items.");
1566 ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
1567 assert(ReductionData.ReductionRange.isInvalid() &&
1568 (getTopOfStack().
Directive == OMPD_taskgroup ||
1572 "Additional reduction info may be specified only once for reduction "
1574 ReductionData.set(BOK, SR);
1575 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1576 if (!TaskgroupReductionRef) {
1579 TaskgroupReductionRef =
1585 const Expr *ReductionRef) {
1587 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1589 getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
1590 "Additional reduction info may be specified only for reduction items.");
1591 ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
1592 assert(ReductionData.ReductionRange.isInvalid() &&
1593 (getTopOfStack().
Directive == OMPD_taskgroup ||
1597 "Additional reduction info may be specified only once for reduction "
1599 ReductionData.set(ReductionRef, SR);
1600 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1601 if (!TaskgroupReductionRef) {
1604 TaskgroupReductionRef =
1609 const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1611 Expr *&TaskgroupDescriptor)
const {
1613 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty.");
1614 for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
1615 const DSAInfo &
Data = I->SharingMap.lookup(D);
1616 if (
Data.Attributes != OMPC_reduction ||
1617 Data.Modifier != OMPC_REDUCTION_task)
1619 const ReductionData &ReductionData = I->ReductionMap.lookup(D);
1620 if (!ReductionData.ReductionOp ||
1621 ReductionData.ReductionOp.is<
const Expr *>())
1622 return DSAVarData();
1623 SR = ReductionData.ReductionRange;
1624 BOK = ReductionData.ReductionOp.get<ReductionData::BOKPtrType>();
1625 assert(I->TaskgroupReductionRef &&
"taskgroup reduction reference "
1626 "expression for the descriptor is not "
1628 TaskgroupDescriptor = I->TaskgroupReductionRef;
1629 return DSAVarData(I->Directive, OMPC_reduction,
Data.RefExpr.getPointer(),
1630 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
1633 return DSAVarData();
// Overload of getTopMostTaskgroupReductionData for reductions recorded as a
// user-defined reduction identifier (Expr*). Mirrors the BOK overload above
// with the PointerUnion test inverted: it bails out unless ReductionOp holds
// a 'const Expr *'.
// NOTE(review): lossy extraction — interior source lines elided, original
// line numbers embedded in the text.
1636 const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1638 Expr *&TaskgroupDescriptor)
const {
1640 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty.");
// Only parent regions are candidates; the current region is skipped.
1641 for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
1642 const DSAInfo &
Data = I->SharingMap.lookup(D);
1643 if (
Data.Attributes != OMPC_reduction ||
1644 Data.Modifier != OMPC_REDUCTION_task)
1646 const ReductionData &ReductionData = I->ReductionMap.lookup(D);
1647 if (!ReductionData.ReductionOp ||
1648 !ReductionData.ReductionOp.is<
const Expr *>())
1649 return DSAVarData();
// Hand back the reduction-id expression rather than an operator kind.
1650 SR = ReductionData.ReductionRange;
1651 ReductionRef = ReductionData.ReductionOp.get<
const Expr *>();
1652 assert(I->TaskgroupReductionRef &&
"taskgroup reduction reference "
1653 "expression for the descriptor is not "
1655 TaskgroupDescriptor = I->TaskgroupReductionRef;
1656 return DSAVarData(I->Directive, OMPC_reduction,
Data.RefExpr.getPointer(),
1657 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
1660 return DSAVarData();
// Determines whether variable 'D' is local to an OpenMP region starting at
// stack iterator 'I'. For tasking/target-style regions it appears to walk
// scopes from the current scope toward a top scope looking for D's declaring
// scope — presumably to decide if D was declared inside the region; TODO
// confirm against the full source, as several interior lines are elided here.
1663 bool DSAStackTy::isOpenMPLocal(
VarDecl *D, const_iterator I)
const {
1665 for (const_iterator E = end(); I != E; ++I) {
1666 if (isImplicitOrExplicitTaskingRegion(I->Directive) ||
1670 Scope *CurScope = getCurScope();
1671 while (CurScope && CurScope != TopScope && !CurScope->
isDeclScope(D))
1673 return CurScope != TopScope;
1676 if (I->Context == DC)
1685 bool AcceptIfMutable =
true,
1686 bool *IsClassType =
nullptr) {
1688 Type =
Type.getNonReferenceType().getCanonicalType();
1689 bool IsConstant =
Type.isConstant(Context);
1694 if (
const auto *CTSD = dyn_cast_or_null<ClassTemplateSpecializationDecl>(RD))
1696 RD = CTD->getTemplatedDecl();
1699 return IsConstant && !(SemaRef.
getLangOpts().CPlusPlus && RD &&
1706 bool AcceptIfMutable =
true,
1707 bool ListItemNotVar =
false) {
1711 unsigned Diag = ListItemNotVar ? diag::err_omp_const_list_item
1712 : IsClassType ? diag::err_omp_const_not_mutable_variable
1713 : diag::err_omp_const_variable;
1714 SemaRef.
Diag(ELoc,
Diag) << getOpenMPClauseName(CKind);
1715 if (!ListItemNotVar && D) {
1716 const VarDecl *VD = dyn_cast<VarDecl>(D);
1720 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
1728 const DSAStackTy::DSAVarData DSAStackTy::getTopDSA(
ValueDecl *D,
1733 auto *VD = dyn_cast<VarDecl>(D);
1734 auto TI = Threadprivates.find(D);
1735 if (TI != Threadprivates.end()) {
1736 DVar.RefExpr = TI->getSecond().RefExpr.getPointer();
1737 DVar.CKind = OMPC_threadprivate;
1738 DVar.Modifier = TI->getSecond().Modifier;
1741 if (VD && VD->
hasAttr<OMPThreadPrivateDeclAttr>()) {
1744 VD->
getAttr<OMPThreadPrivateDeclAttr>()->getLocation());
1745 DVar.CKind = OMPC_threadprivate;
1746 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1753 !(VD->
hasAttr<OMPThreadPrivateDeclAttr>() &&
1760 DVar.CKind = OMPC_threadprivate;
1761 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1766 !isLoopControlVariable(D).first) {
1767 const_iterator IterTarget =
1768 std::find_if(begin(), end(), [](
const SharingMapTy &
Data) {
1771 if (IterTarget != end()) {
1772 const_iterator ParentIterTarget = IterTarget + 1;
1773 for (const_iterator Iter = begin(); Iter != ParentIterTarget; ++Iter) {
1774 if (isOpenMPLocal(VD, Iter)) {
1778 DVar.CKind = OMPC_threadprivate;
1782 if (!isClauseParsingMode() || IterTarget != begin()) {
1783 auto DSAIter = IterTarget->SharingMap.find(D);
1784 if (DSAIter != IterTarget->SharingMap.end() &&
1786 DVar.RefExpr = DSAIter->getSecond().RefExpr.getPointer();
1787 DVar.CKind = OMPC_threadprivate;
1790 const_iterator
End = end();
1796 IterTarget->ConstructLoc);
1797 DVar.CKind = OMPC_threadprivate;
1817 const_iterator I = begin();
1818 const_iterator EndI = end();
1819 if (FromParent && I != EndI)
1822 auto It = I->SharingMap.find(D);
1823 if (It != I->SharingMap.end()) {
1824 const DSAInfo &
Data = It->getSecond();
1825 DVar.RefExpr =
Data.RefExpr.getPointer();
1826 DVar.PrivateCopy =
Data.PrivateCopy;
1827 DVar.CKind =
Data.Attributes;
1828 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1829 DVar.DKind = I->Directive;
1830 DVar.Modifier =
Data.Modifier;
1831 DVar.AppliedToPointee =
Data.AppliedToPointee;
1836 DVar.CKind = OMPC_shared;
1843 if (SemaRef.
LangOpts.OpenMP <= 31) {
1851 DSAVarData DVarTemp = hasInnermostDSA(
1854 return C == OMPC_firstprivate ||
C == OMPC_shared;
1856 MatchesAlways, FromParent);
1857 if (DVarTemp.CKind != OMPC_unknown && DVarTemp.RefExpr)
1860 DVar.CKind = OMPC_shared;
1867 const_iterator I = begin();
1868 const_iterator EndI = end();
1869 if (FromParent && I != EndI)
1873 auto It = I->SharingMap.find(D);
1874 if (It != I->SharingMap.end()) {
1875 const DSAInfo &
Data = It->getSecond();
1876 DVar.RefExpr =
Data.RefExpr.getPointer();
1877 DVar.PrivateCopy =
Data.PrivateCopy;
1878 DVar.CKind =
Data.Attributes;
1879 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1880 DVar.DKind = I->Directive;
1881 DVar.Modifier =
Data.Modifier;
1882 DVar.AppliedToPointee =
Data.AppliedToPointee;
1888 const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(
ValueDecl *D,
1889 bool FromParent)
const {
1890 if (isStackEmpty()) {
1892 return getDSA(I, D);
1895 const_iterator StartI = begin();
1896 const_iterator EndI = end();
1897 if (FromParent && StartI != EndI)
1899 return getDSA(StartI, D);
1902 const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(
ValueDecl *D,
1903 unsigned Level)
const {
1904 if (getStackSize() <=
Level)
1905 return DSAVarData();
1907 const_iterator StartI = std::next(begin(), getStackSize() - 1 -
Level);
1908 return getDSA(StartI, D);
1911 const DSAStackTy::DSAVarData
1914 DefaultDataSharingAttributes)>
1917 bool FromParent)
const {
1921 const_iterator I = begin();
1922 const_iterator EndI = end();
1923 if (FromParent && I != EndI)
1925 for (; I != EndI; ++I) {
1926 if (!DPred(I->Directive) &&
1927 !isImplicitOrExplicitTaskingRegion(I->Directive))
1929 const_iterator NewI = I;
1930 DSAVarData DVar = getDSA(NewI, D);
1931 if (I == NewI && CPred(DVar.CKind, DVar.AppliedToPointee, I->DefaultAttr))
1937 const DSAStackTy::DSAVarData DSAStackTy::hasInnermostDSA(
1940 bool FromParent)
const {
1944 const_iterator StartI = begin();
1945 const_iterator EndI = end();
1946 if (FromParent && StartI != EndI)
1948 if (StartI == EndI || !DPred(StartI->Directive))
1950 const_iterator NewI = StartI;
1951 DSAVarData DVar = getDSA(NewI, D);
1952 return (NewI == StartI && CPred(DVar.CKind, DVar.AppliedToPointee))
// Returns true if 'D' has an explicit data-sharing attribute at stack depth
// 'Level' that satisfies predicate CPred. The RefExpr pointer being non-null
// marks an *explicit* (clause-specified) DSA; the RefExpr int bit appears to
// tag lastprivate entries, which NotLastprivate filters out. Loop-control
// variables at that level are treated as explicitly private.
// NOTE(review): lossy extraction — some interior lines are elided.
1957 bool DSAStackTy::hasExplicitDSA(
1960 unsigned Level,
bool NotLastprivate)
const {
1961 if (getStackSize() <=
Level)
1964 const SharingMapTy &StackElem = getStackElemAtLevel(
Level);
1965 auto I = StackElem.SharingMap.find(D);
1966 if (I != StackElem.SharingMap.end() && I->getSecond().RefExpr.getPointer() &&
1967 CPred(I->getSecond().Attributes, I->getSecond().AppliedToPointee) &&
1968 (!NotLastprivate || !I->getSecond().RefExpr.getInt()))
1971 auto LI = StackElem.LCVMap.find(D);
1972 if (LI != StackElem.LCVMap.end())
1973 return CPred(OMPC_private,
false);
// Returns true if the directive recorded at stack depth 'Level' satisfies
// predicate DPred; false when the stack is not deep enough.
1977 bool DSAStackTy::hasExplicitDirective(
1979 unsigned Level)
const {
1980 if (getStackSize() <=
Level)
1982 const SharingMapTy &StackElem = getStackElemAtLevel(
Level);
1983 return DPred(StackElem.Directive);
1986 bool DSAStackTy::hasDirective(
1990 bool FromParent)
const {
1992 size_t Skip = FromParent ? 2 : 1;
1993 for (const_iterator I = begin() +
std::min(Skip, getStackSize()), E = end();
1995 if (DPred(I->Directive, I->DirectiveName, I->ConstructLoc))
// Lazily creates the per-Sema OpenMP data-sharing-attributes stack. The
// stack is stored type-erased on Sema (VarDataSharingAttributesStack); the
// DSAStack macro below recovers the concrete DSAStackTy* for this file.
2001 void Sema::InitDataSharingAttributesStack() {
2002 VarDataSharingAttributesStack =
new DSAStackTy(*
this);
// File-local convenience accessor for the type-erased stack pointer.
2005 #define DSAStack static_cast<DSAStackTy *>(VarDataSharingAttributesStack)
// Pushes a fresh function-scope frame onto the DSA stack.
2007 void Sema::pushOpenMPFunctionRegion() {
DSAStack->pushFunction(); }
2015 "Expected OpenMP device compilation.");
2021 enum class FunctionEmissionStatus {
2031 assert(LangOpts.OpenMP && LangOpts.OpenMPIsDevice &&
2032 "Expected OpenMP device compilation.");
2038 case FunctionEmissionStatus::Emitted:
2039 Kind = SemaDiagnosticBuilder::K_Immediate;
2050 ? SemaDiagnosticBuilder::K_Deferred
2051 : SemaDiagnosticBuilder::K_Immediate;
2053 case FunctionEmissionStatus::TemplateDiscarded:
2054 case FunctionEmissionStatus::OMPDiscarded:
2055 Kind = SemaDiagnosticBuilder::K_Nop;
2057 case FunctionEmissionStatus::CUDADiscarded:
2058 llvm_unreachable(
"CUDADiscarded unexpected in OpenMP device compilation");
2069 assert(LangOpts.OpenMP && !LangOpts.OpenMPIsDevice &&
2070 "Expected OpenMP host compilation.");
2076 case FunctionEmissionStatus::Emitted:
2077 Kind = SemaDiagnosticBuilder::K_Immediate;
2080 Kind = SemaDiagnosticBuilder::K_Deferred;
2082 case FunctionEmissionStatus::TemplateDiscarded:
2083 case FunctionEmissionStatus::OMPDiscarded:
2084 case FunctionEmissionStatus::CUDADiscarded:
2085 Kind = SemaDiagnosticBuilder::K_Nop;
2095 if (LO.OpenMP <= 45) {
2097 return OMPC_DEFAULTMAP_scalar;
2098 return OMPC_DEFAULTMAP_aggregate;
2101 return OMPC_DEFAULTMAP_pointer;
2103 return OMPC_DEFAULTMAP_scalar;
2104 return OMPC_DEFAULTMAP_aggregate;
2108 unsigned OpenMPCaptureLevel)
const {
2109 assert(LangOpts.OpenMP &&
"OpenMP is not allowed");
2112 bool IsByRef =
true;
2118 bool IsVariableUsedInMapClause =
false;
2180 bool IsVariableAssociatedWithSection =
false;
2182 DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2184 [&IsVariableUsedInMapClause, &IsVariableAssociatedWithSection,
2191 if (WhereFoundClauseKind != OMPC_map &&
2192 WhereFoundClauseKind != OMPC_has_device_addr)
2195 auto EI = MapExprComponents.rbegin();
2196 auto EE = MapExprComponents.rend();
2198 assert(EI != EE &&
"Invalid map expression!");
2200 if (isa<DeclRefExpr>(EI->getAssociatedExpression()))
2201 IsVariableUsedInMapClause |= EI->getAssociatedDeclaration() == D;
2207 if (isa<ArraySubscriptExpr>(EI->getAssociatedExpression()) ||
2208 isa<OMPArraySectionExpr>(EI->getAssociatedExpression()) ||
2209 isa<MemberExpr>(EI->getAssociatedExpression()) ||
2210 isa<OMPArrayShapingExpr>(EI->getAssociatedExpression())) {
2211 IsVariableAssociatedWithSection =
true;
2220 if (IsVariableUsedInMapClause) {
2223 IsByRef = !(Ty->
isPointerType() && IsVariableAssociatedWithSection);
2228 IsByRef = (
DSAStack->isForceCaptureByReferenceInTargetExecutable() &&
2231 DSAStack->isDefaultmapCapturedByRef(
2236 return K == OMPC_reduction && !AppliedToPointee;
2244 ((IsVariableUsedInMapClause &&
2250 return K == OMPC_firstprivate ||
2251 (K == OMPC_reduction && AppliedToPointee);
2257 !(isa<OMPCapturedExprDecl>(D) && !D->
hasAttr<OMPCaptureNoInitAttr>() &&
2258 !cast<OMPCapturedExprDecl>(D)->getInit()->isGLValue()) &&
2261 !((
DSAStack->getDefaultDSA() == DSA_firstprivate ||
2262 DSAStack->getDefaultDSA() == DSA_private) &&
2283 unsigned Sema::getOpenMPNestingLevel()
const {
2284 assert(getLangOpts().OpenMP);
2285 return DSAStack->getNestingLevel();
2295 !
DSAStack->isClauseParsingMode()) ||
2306 if (!dyn_cast<FieldDecl>(D))
2308 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2311 DefaultDataSharingAttributes DefaultAttr) {
2313 (DefaultAttr == DSA_firstprivate || DefaultAttr == DSA_private);
2317 if (DVarPrivate.CKind != OMPC_unknown)
2323 Expr *CaptureExpr,
bool WithInit,
2329 assert(LangOpts.OpenMP &&
"OpenMP is not allowed");
2332 auto *VD = dyn_cast<VarDecl>(D);
2341 DSAStackTy::ParentDirectiveScope InParentDirectiveRAII(
2348 (getCurCapturedRegion() || getCurBlock() || getCurLambda())) {
2349 if (isInOpenMPTargetExecutionDirective()) {
2350 DSAStackTy::DSAVarData DVarTop =
2352 if (DVarTop.CKind != OMPC_unknown && DVarTop.RefExpr)
2357 if (OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2361 llvm::reverse(FunctionScopes),
2362 CheckScopeInfo ? (FunctionScopes.size() - (StopAt + 1)) : 0)) {
2363 if (!isa<CapturingScopeInfo>(FSI))
2365 if (
auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2371 assert(CSI &&
"Failed to find CapturedRegionScopeInfo");
2378 if (isInOpenMPDeclareTargetContext()) {
2381 if (LangOpts.OpenMP <= 45 &&
2382 !OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2383 checkDeclIsAllowedInOpenMPTarget(
nullptr, VD);
2388 if (CheckScopeInfo) {
2389 bool OpenMPFound =
false;
2390 for (
unsigned I = StopAt + 1; I > 0; --I) {
2392 if (!isa<CapturingScopeInfo>(FSI))
2394 if (
auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2404 if (
DSAStack->getCurrentDirective() != OMPD_unknown &&
2405 (!
DSAStack->isClauseParsingMode() ||
2406 DSAStack->getParentDirective() != OMPD_unknown)) {
2407 auto &&Info =
DSAStack->isLoopControlVariable(D);
2410 isImplicitOrExplicitTaskingRegion(
DSAStack->getCurrentDirective())) ||
2411 (VD &&
DSAStack->isForceVarCapturing()))
2412 return VD ? VD : Info.second;
2413 DSAStackTy::DSAVarData DVarTop =
2415 if (DVarTop.CKind != OMPC_unknown &&
isOpenMPPrivate(DVarTop.CKind) &&
2417 return VD ? VD : cast<VarDecl>(DVarTop.PrivateCopy->getDecl());
2423 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2431 if (VD && !VD->
hasLocalStorage() && DVarPrivate.CKind == OMPC_unknown &&
2432 ((
DSAStack->getDefaultDSA() != DSA_none &&
2433 DSAStack->getDefaultDSA() != DSA_private &&
2434 DSAStack->getDefaultDSA() != DSA_firstprivate) ||
2435 DVarTop.CKind == OMPC_shared))
2437 auto *FD = dyn_cast<FieldDecl>(D);
2438 if (DVarPrivate.CKind != OMPC_unknown && !VD && FD &&
2439 !DVarPrivate.PrivateCopy) {
2440 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2443 DefaultDataSharingAttributes DefaultAttr) {
2445 (DefaultAttr == DSA_firstprivate ||
2446 DefaultAttr == DSA_private);
2450 if (DVarPrivate.CKind == OMPC_unknown)
2456 if (getCurrentThisType().isNull())
2468 *
this, FD->
getIdentifier(), ME, DVarPrivate.CKind != OMPC_private,
2469 CurContext->getParent(),
false);
2472 VD = cast<VarDecl>(VDPrivateRefExpr->
getDecl());
2473 DSAStack->addImplicitDefaultFirstprivateFD(FD, VD);
2476 if (DVarPrivate.CKind != OMPC_unknown ||
2477 (VD && (
DSAStack->getDefaultDSA() == DSA_none ||
2478 DSAStack->getDefaultDSA() == DSA_private ||
2479 DSAStack->getDefaultDSA() == DSA_firstprivate)))
2480 return VD ? VD : cast<VarDecl>(DVarPrivate.PrivateCopy->getDecl());
2485 void Sema::adjustOpenMPTargetScopeIndex(
unsigned &FunctionScopesIndex,
2486 unsigned Level)
const {
2487 FunctionScopesIndex -= getOpenMPCaptureLevels(
DSAStack->getDirective(
Level));
2491 assert(LangOpts.OpenMP &&
"OpenMP must be enabled.");
2497 assert(LangOpts.OpenMP &&
"OpenMP must be enabled.");
2499 DSAStack->resetPossibleLoopCounter();
2505 unsigned CapLevel)
const {
2506 assert(LangOpts.OpenMP &&
"OpenMP is not allowed");
2507 if (
DSAStack->getCurrentDirective() != OMPD_unknown &&
2508 (!
DSAStack->isClauseParsingMode() ||
2509 DSAStack->getParentDirective() != OMPD_unknown)) {
2510 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2513 DefaultDataSharingAttributes DefaultAttr) {
2515 DefaultAttr == DSA_private;
2519 if (DVarPrivate.CKind == OMPC_private && isa<OMPCapturedExprDecl>(D) &&
2520 DSAStack->isImplicitDefaultFirstprivateFD(cast<VarDecl>(D)) &&
2521 !
DSAStack->isLoopControlVariable(D).first)
2522 return OMPC_private;
2525 bool IsTriviallyCopyable =
2535 (IsTriviallyCopyable ||
2541 return OMPC_firstprivate;
2542 DSAStackTy::DSAVarData DVar =
DSAStack->getImplicitDSA(D,
Level);
2543 if (DVar.CKind != OMPC_shared &&
2544 !
DSAStack->isLoopControlVariable(D,
Level).first && !DVar.RefExpr) {
2546 return OMPC_firstprivate;
2552 DSAStack->resetPossibleLoopCounter(D);
2554 return OMPC_private;
2557 DSAStack->isLoopControlVariable(D).first) &&
2562 return OMPC_private;
2564 if (
const auto *VD = dyn_cast<VarDecl>(D)) {
2570 return OMPC_private;
2576 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait) ==
2577 DSAStackTy::UsesAllocatorsDeclKind::UserDefinedAllocator)
2578 return OMPC_private;
2582 (
DSAStack->isClauseParsingMode() &&
2583 DSAStack->getClauseParsingMode() == OMPC_private) ||
2588 return K == OMPD_taskgroup ||
2589 ((isOpenMPParallelDirective(K) ||
2590 isOpenMPWorksharingDirective(K)) &&
2591 !isOpenMPSimdDirective(K));
2601 assert(LangOpts.OpenMP &&
"OpenMP is not allowed");
2604 for (
unsigned I =
DSAStack->getNestingLevel() + 1; I >
Level; --I) {
2605 const unsigned NewLevel = I - 1;
2609 if (isOpenMPPrivate(K) && !AppliedToPointee) {
2617 if (
DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2627 if (
DSAStack->mustBeFirstprivateAtLevel(
2629 OMPC = OMPC_firstprivate;
2633 if (OMPC != OMPC_unknown)
2634 FD->
addAttr(OMPCaptureKindAttr::CreateImplicit(Context,
unsigned(OMPC)));
2638 unsigned CaptureLevel)
const {
2639 assert(
LangOpts.OpenMP &&
"OpenMP is not allowed");
2644 const auto *VD = dyn_cast<VarDecl>(D);
2648 Regions[CaptureLevel] != OMPD_task;
2652 unsigned CaptureLevel)
const {
2653 assert(
LangOpts.OpenMP &&
"OpenMP is not allowed");
2656 if (
const auto *VD = dyn_cast<VarDecl>(D)) {
2660 DSAStackTy::DSAVarData TopDVar =
2662 unsigned NumLevels =
2667 return (NumLevels == CaptureLevel + 1 &&
2668 (TopDVar.CKind != OMPC_shared ||
2669 DSAStack->getDefaultDSA() == DSA_firstprivate));
2672 DSAStackTy::DSAVarData DVar =
DSAStack->getImplicitDSA(D,
Level);
2673 if (DVar.CKind != OMPC_shared)
2675 }
while (
Level > 0);
// Tears down the stack created by InitDataSharingAttributesStack().
2681 void Sema::DestroyDataSharingAttributesStack() {
delete DSAStack; }
2685 OMPDeclareVariantScopes.push_back(OMPDeclareVariantScope(TI));
2690 "Not in OpenMP declare variant scope!");
2692 OMPDeclareVariantScopes.pop_back();
2698 assert(
LangOpts.OpenMP &&
"Expected OpenMP compilation mode.");
2699 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2703 (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host))
2706 if (!
LangOpts.OpenMPIsDevice && DevTy &&
2707 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost)
2710 DevTy = OMPDeclareTargetDeclAttr::getDeviceType(FD);
2711 if (
LangOpts.OpenMPIsDevice && DevTy &&
2712 *DevTy == OMPDeclareTargetDeclAttr::DT_Host) {
2714 StringRef HostDevTy =
2716 Diag(Loc, diag::err_omp_wrong_device_function_call) << HostDevTy << 0;
2717 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2718 diag::note_omp_marked_device_type_here)
2722 if (!
LangOpts.OpenMPIsDevice && !
LangOpts.OpenMPOffloadMandatory && DevTy &&
2723 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost) {
2727 for (OMPDeclareVariantAttr *A :
2728 Callee->specific_attrs<OMPDeclareVariantAttr>()) {
2729 auto *DeclRefVariant = cast<DeclRefExpr>(A->getVariantFuncRef());
2730 auto *VariantFD = cast<FunctionDecl>(DeclRefVariant->getDecl());
2731 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2732 OMPDeclareTargetDeclAttr::getDeviceType(
2733 VariantFD->getMostRecentDecl());
2734 if (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host)
2740 Callee->hasAttr<OMPDeclareVariantAttr>() && HasHostAttr(Callee))
2744 OMPC_device_type, OMPC_DEVICE_TYPE_nohost);
2745 Diag(Loc, diag::err_omp_wrong_device_function_call) << NoHostDevTy << 1;
2746 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2747 diag::note_omp_marked_device_type_here)
2755 DSAStack->push(DKind, DirName, CurScope, Loc);
2765 DSAStack->setClauseParsingMode(OMPC_unknown);
2769 static std::pair<ValueDecl *, bool>
2771 SourceRange &ERange,
bool AllowArraySection =
false,
2772 StringRef DiagType =
"");
2777 bool InscanFound =
false;
2784 if (C->getClauseKind() != OMPC_reduction)
2786 auto *RC = cast<OMPReductionClause>(C);
2787 if (RC->getModifier() == OMPC_REDUCTION_inscan) {
2789 InscanLoc = RC->getModifierLoc();
2792 if (RC->getModifier() == OMPC_REDUCTION_task) {
2802 S.
Diag(RC->getModifierLoc(),
2803 diag::err_omp_reduction_task_not_parallel_or_worksharing);
2809 if (C->getClauseKind() != OMPC_reduction)
2811 auto *RC = cast<OMPReductionClause>(C);
2812 if (RC->getModifier() != OMPC_REDUCTION_inscan) {
2815 : RC->getModifierLoc(),
2816 diag::err_omp_inscan_reduction_expected);
2817 S.
Diag(InscanLoc, diag::note_omp_previous_inscan_reduction);
2820 for (
Expr *Ref : RC->varlists()) {
2821 assert(Ref &&
"NULL expr in OpenMP nontemporal clause.");
2824 Expr *SimpleRefExpr = Ref;
2831 S.
Diag(Ref->getExprLoc(),
2832 diag::err_omp_reduction_not_inclusive_exclusive)
2833 << Ref->getSourceRange();
2847 const DSAStackTy::DSAVarData &DVar,
2848 bool IsLoopIterVar =
false);
2856 if (
const auto *D = dyn_cast_or_null<OMPExecutableDirective>(CurDirective)) {
2858 if (
auto *Clause = dyn_cast<OMPLastprivateClause>(C)) {
2860 for (
Expr *DE : Clause->varlists()) {
2861 if (DE->isValueDependent() || DE->isTypeDependent()) {
2862 PrivateCopies.push_back(
nullptr);
2865 auto *DRE = cast<DeclRefExpr>(DE->IgnoreParens());
2866 auto *VD = cast<VarDecl>(DRE->getDecl());
2868 const DSAStackTy::DSAVarData DVar =
2870 if (DVar.CKind == OMPC_lastprivate) {
2877 *
this, DE->getExprLoc(),
Type.getUnqualifiedType(),
2881 PrivateCopies.push_back(
nullptr);
2885 *
this, VDPrivate, DE->
getType(), DE->getExprLoc()));
2889 PrivateCopies.push_back(
nullptr);
2892 Clause->setPrivateCopies(PrivateCopies);
2896 if (
auto *Clause = dyn_cast<OMPNontemporalClause>(C)) {
2898 for (
Expr *RefExpr : Clause->varlists()) {
2899 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
2902 Expr *SimpleRefExpr = RefExpr;
2906 PrivateRefs.push_back(RefExpr);
2911 const DSAStackTy::DSAVarData DVar =
2913 PrivateRefs.push_back(DVar.PrivateCopy ? DVar.PrivateCopy
2916 Clause->setPrivateRefs(PrivateRefs);
2919 if (
auto *Clause = dyn_cast<OMPUsesAllocatorsClause>(C)) {
2920 for (
unsigned I = 0, E = Clause->getNumberOfAllocators(); I < E; ++I) {
2926 if (!VD || !isa<VarDecl>(VD))
2928 DSAStackTy::DSAVarData DVar =
2934 Expr *MapExpr =
nullptr;
2936 DSAStack->checkMappableExprComponentListsForDecl(
2942 auto MI = MapExprComponents.rbegin();
2943 auto ME = MapExprComponents.rend();
2945 MI->getAssociatedDeclaration()->getCanonicalDecl() ==
2946 VD->getCanonicalDecl()) {
2947 MapExpr = MI->getAssociatedExpression();
2952 Diag(D.Allocator->getExprLoc(),
2953 diag::err_omp_allocator_used_in_clauses)
2958 Diag(MapExpr->getExprLoc(), diag::note_used_here)
2959 << MapExpr->getSourceRange();
2972 DiscardCleanupsInEvaluationContext();
2973 PopExpressionEvaluationContext();
2977 Expr *NumIterations,
Sema &SemaRef,
2978 Scope *S, DSAStackTy *Stack);
2987 explicit VarDeclFilterCCC(
Sema &S) : SemaRef(S) {}
2990 if (
const auto *VD = dyn_cast_or_null<VarDecl>(ND)) {
2998 std::unique_ptr<CorrectionCandidateCallback>
clone()
override {
2999 return std::make_unique<VarDeclFilterCCC>(*
this);
3008 explicit VarOrFuncDeclFilterCCC(
Sema &S) : SemaRef(S) {}
3011 if (ND && ((isa<VarDecl>(ND) && ND->
getKind() == Decl::Var) ||
3012 isa<FunctionDecl>(ND))) {
3019 std::unique_ptr<CorrectionCandidateCallback>
clone()
override {
3020 return std::make_unique<VarOrFuncDeclFilterCCC>(*
this);
3038 VarDeclFilterCCC CCC(*
this);
3044 ? diag::err_undeclared_var_use_suggest
3045 : diag::err_omp_expected_var_arg_suggest)
3047 VD = Corrected.getCorrectionDeclAs<
VarDecl>();
3049 Diag(
Id.getLoc(), Lookup.
empty() ? diag::err_undeclared_var_use
3050 : diag::err_omp_expected_var_arg)
3055 Diag(
Id.getLoc(), diag::err_omp_expected_var_arg) <<
Id.getName();
3064 Diag(
Id.getLoc(), diag::err_omp_global_var_arg)
3069 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3081 Diag(
Id.getLoc(), diag::err_omp_var_scope)
3082 << getOpenMPDirectiveName(
Kind) << VD;
3086 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3096 Diag(
Id.getLoc(), diag::err_omp_var_scope)
3097 << getOpenMPDirectiveName(
Kind) << VD;
3101 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3112 Diag(
Id.getLoc(), diag::err_omp_var_scope)
3113 << getOpenMPDirectiveName(
Kind) << VD;
3117 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3126 Diag(
Id.getLoc(), diag::err_omp_var_scope)
3127 << getOpenMPDirectiveName(
Kind) << VD;
3131 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3139 if (
Kind == OMPD_threadprivate && VD->
isUsed() &&
3141 Diag(
Id.getLoc(), diag::err_omp_var_used)
3142 << getOpenMPDirectiveName(
Kind) << VD;
3164 class LocalVarRefChecker final
3170 if (
const auto *VD = dyn_cast<VarDecl>(E->
getDecl())) {
3173 diag::err_omp_local_var_in_threadprivate_init)
3175 SemaRef.Diag(VD->
getLocation(), diag::note_defined_here)
3182 bool VisitStmt(
const Stmt *S) {
3183 for (
const Stmt *Child : S->children()) {
3184 if (Child && Visit(Child))
3189 explicit LocalVarRefChecker(
Sema &SemaRef) : SemaRef(SemaRef) {}
3196 for (
Expr *RefExpr : VarList) {
3197 auto *DE = cast<DeclRefExpr>(RefExpr);
3198 auto *VD = cast<VarDecl>(DE->getDecl());
3215 diag::err_omp_threadprivate_incomplete_type)) {
3222 Diag(ILoc, diag::err_omp_ref_type_arg)
3223 << getOpenMPDirectiveName(OMPD_threadprivate) << VD->
getType();
3227 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3235 !(VD->
hasAttr<OMPThreadPrivateDeclAttr>() &&
3240 Diag(ILoc, diag::err_omp_var_thread_local)
3245 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3253 LocalVarRefChecker Checker(*
this);
3254 if (Checker.Visit(Init))
3258 Vars.push_back(RefExpr);
3259 DSAStack->addDSA(VD, DE, OMPC_threadprivate);
3260 VD->
addAttr(OMPThreadPrivateDeclAttr::CreateImplicit(
3263 ML->DeclarationMarkedOpenMPThreadPrivate(VD);
3266 if (!Vars.empty()) {
// Classifies an 'allocator' clause expression as one of the predefined
// OpenMP memory allocators by structural (FoldingSetNodeID) comparison
// against each predefined allocator expression stored on the stack; falls
// back to OMPUserDefinedMemAlloc when nothing matches or the expression is
// still dependent. NOTE(review): lossy extraction — the profiling calls that
// populate AEId/DAEId are among the elided lines; TODO confirm.
3274 static OMPAllocateDeclAttr::AllocatorTypeTy
3277 return OMPAllocateDeclAttr::OMPNullMemAlloc;
3278 if (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3279 Allocator->isInstantiationDependent() ||
3280 Allocator->containsUnexpandedParameterPack())
3281 return OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3282 auto AllocatorKindRes = OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3283 llvm::FoldingSetNodeID AEId;
3284 const Expr *AE = Allocator->IgnoreParenImpCasts();
// Try each predefined allocator kind in turn.
3286 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
3287 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
3288 const Expr *DefAllocator = Stack->getAllocator(AllocatorKind);
3289 llvm::FoldingSetNodeID DAEId;
3292 if (AEId == DAEId) {
3293 AllocatorKindRes = AllocatorKind;
3297 return AllocatorKindRes;
3302 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
Expr *Allocator) {
3303 if (!VD->
hasAttr<OMPAllocateDeclAttr>())
3305 const auto *A = VD->
getAttr<OMPAllocateDeclAttr>();
3306 Expr *PrevAllocator = A->getAllocator();
3307 OMPAllocateDeclAttr::AllocatorTypeTy PrevAllocatorKind =
3309 bool AllocatorsMatch = AllocatorKind == PrevAllocatorKind;
3310 if (AllocatorsMatch &&
3311 AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc &&
3312 Allocator && PrevAllocator) {
3313 const Expr *AE = Allocator->IgnoreParenImpCasts();
3315 llvm::FoldingSetNodeID AEId, PAEId;
3318 AllocatorsMatch = AEId == PAEId;
3320 if (!AllocatorsMatch) {
3322 llvm::raw_svector_ostream AllocatorStream(AllocatorBuffer);
3326 llvm::raw_svector_ostream PrevAllocatorStream(PrevAllocatorBuffer);
3328 PrevAllocator->printPretty(PrevAllocatorStream,
nullptr,
3332 Allocator ? Allocator->getExprLoc() : RefExpr->
getExprLoc();
3334 Allocator ? Allocator->getSourceRange() : RefExpr->
getSourceRange();
3336 PrevAllocator ? PrevAllocator->getExprLoc() : A->getLocation();
3338 PrevAllocator ? PrevAllocator->getSourceRange() : A->getRange();
3339 S.
Diag(AllocatorLoc, diag::warn_omp_used_different_allocator)
3340 << (Allocator ? 1 : 0) << AllocatorStream.str()
3341 << (PrevAllocator ? 1 : 0) << PrevAllocatorStream.str()
3343 S.
Diag(PrevAllocatorLoc, diag::note_omp_previous_allocator)
3344 << PrevAllocatorRange;
3352 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
3354 if (VD->
hasAttr<OMPAllocateDeclAttr>())
3363 (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3364 Allocator->isInstantiationDependent() ||
3365 Allocator->containsUnexpandedParameterPack()))
3367 auto *A = OMPAllocateDeclAttr::CreateImplicit(S.
Context, AllocatorKind,
3368 Allocator, Alignment, SR);
3371 ML->DeclarationMarkedOpenMPAllocate(VD, A);
3378 assert(Clauses.size() <= 2 &&
"Expected at most two clauses.");
3379 Expr *Alignment =
nullptr;
3380 Expr *Allocator =
nullptr;
3381 if (Clauses.empty()) {
3388 targetDiag(Loc, diag::err_expected_allocator_clause);
3391 if (
const auto *AC = dyn_cast<OMPAllocatorClause>(C))
3392 Allocator = AC->getAllocator();
3393 else if (
const auto *AC = dyn_cast<OMPAlignClause>(C))
3394 Alignment = AC->getAlignment();
3396 llvm_unreachable(
"Unexpected clause on allocate directive");
3398 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind =
3401 for (
Expr *RefExpr : VarList) {
3402 auto *DE = cast<DeclRefExpr>(RefExpr);
3403 auto *VD = cast<VarDecl>(DE->getDecl());
3407 VD->
hasAttr<OMPThreadPrivateDeclAttr>() ||
3415 AllocatorKind, Allocator))
3423 if (AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc) {
3424 Diag(Allocator->getExprLoc(),
3425 diag::err_omp_expected_predefined_allocator)
3426 << Allocator->getSourceRange();
3430 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3436 Vars.push_back(RefExpr);
3455 Diag(Loc, diag::err_omp_invalid_scope) <<
"requires";
3469 bool SkippedClauses) {
3470 if (!SkippedClauses && Assumptions.empty())
3471 Diag(Loc, diag::err_omp_no_clause_for_directive)
3472 << llvm::omp::getAllAssumeClauseOptions()
3473 << llvm::omp::getOpenMPDirectiveName(DKind);
3475 auto *AA = AssumptionAttr::Create(
Context, llvm::join(Assumptions,
","), Loc);
3476 if (DKind == llvm::omp::Directive::OMPD_begin_assumes) {
3477 OMPAssumeScoped.push_back(AA);
3482 if (Assumptions.empty())
3485 assert(DKind == llvm::omp::Directive::OMPD_assumes &&
3486 "Unexpected omp assumption directive!");
3487 OMPAssumeGlobal.push_back(AA);
3495 while (Ctx->getLexicalParent())
3497 DeclContexts.push_back(Ctx);
3498 while (!DeclContexts.empty()) {
3500 for (
auto *SubDC : DC->
decls()) {
3501 if (SubDC->isInvalidDecl())
3503 if (
auto *CTD = dyn_cast<ClassTemplateDecl>(SubDC)) {
3504 DeclContexts.push_back(CTD->getTemplatedDecl());
3505 llvm::append_range(DeclContexts, CTD->specializations());
3508 if (
auto *DC = dyn_cast<DeclContext>(SubDC))
3509 DeclContexts.push_back(DC);
3510 if (
auto *F = dyn_cast<FunctionDecl>(SubDC)) {
3520 OMPAssumeScoped.pop_back();
3529 DSAStack->getEncounteredTargetLocs();
3531 if (!TargetLocations.empty() || !AtomicLoc.
isInvalid()) {
3532 for (
const OMPClause *CNew : ClauseList) {
3534 if (isa<OMPUnifiedSharedMemoryClause>(CNew) ||
3535 isa<OMPUnifiedAddressClause>(CNew) ||
3536 isa<OMPReverseOffloadClause>(CNew) ||
3537 isa<OMPDynamicAllocatorsClause>(CNew)) {
3538 Diag(Loc, diag::err_omp_directive_before_requires)
3539 <<
"target" << getOpenMPClauseName(CNew->getClauseKind());
3541 Diag(TargetLoc, diag::note_omp_requires_encountered_directive)
3545 isa<OMPAtomicDefaultMemOrderClause>(CNew)) {
3546 Diag(Loc, diag::err_omp_directive_before_requires)
3547 <<
"atomic" << getOpenMPClauseName(CNew->getClauseKind());
3548 Diag(AtomicLoc, diag::note_omp_requires_encountered_directive)
3554 if (!
DSAStack->hasDuplicateRequiresClause(ClauseList))
3562 const DSAStackTy::DSAVarData &DVar,
3563 bool IsLoopIterVar) {
3565 SemaRef.
Diag(DVar.RefExpr->getExprLoc(), diag::note_omp_explicit_dsa)
3566 << getOpenMPClauseName(DVar.CKind);
3570 PDSA_StaticMemberShared,
3571 PDSA_StaticLocalVarShared,
3572 PDSA_LoopIterVarPrivate,
3573 PDSA_LoopIterVarLinear,
3574 PDSA_LoopIterVarLastprivate,
3575 PDSA_ConstVarShared,
3576 PDSA_GlobalVarShared,
3577 PDSA_TaskVarFirstprivate,
3578 PDSA_LocalVarPrivate,
3580 } Reason = PDSA_Implicit;
3581 bool ReportHint =
false;
3583 auto *VD = dyn_cast<VarDecl>(D);
3584 if (IsLoopIterVar) {
3585 if (DVar.CKind == OMPC_private)
3586 Reason = PDSA_LoopIterVarPrivate;
3587 else if (DVar.CKind == OMPC_lastprivate)
3588 Reason = PDSA_LoopIterVarLastprivate;
3590 Reason = PDSA_LoopIterVarLinear;
3592 DVar.CKind == OMPC_firstprivate) {
3593 Reason = PDSA_TaskVarFirstprivate;
3594 ReportLoc = DVar.ImplicitDSALoc;
3596 Reason = PDSA_StaticLocalVarShared;
3598 Reason = PDSA_StaticMemberShared;
3600 Reason = PDSA_GlobalVarShared;
3602 Reason = PDSA_ConstVarShared;
3603 else if (VD && VD->
isLocalVarDecl() && DVar.CKind == OMPC_private) {
3605 Reason = PDSA_LocalVarPrivate;
3607 if (Reason != PDSA_Implicit) {
3608 SemaRef.
Diag(ReportLoc, diag::note_omp_predetermined_dsa)
3609 << Reason << ReportHint
3610 << getOpenMPDirectiveName(Stack->getCurrentDirective());
3611 }
else if (DVar.ImplicitDSALoc.isValid()) {
3612 SemaRef.
Diag(DVar.ImplicitDSALoc, diag::note_omp_implicit_dsa)
3613 << getOpenMPClauseName(DVar.CKind);
3619 bool IsAggregateOrDeclareTarget) {
3622 case OMPC_DEFAULTMAP_MODIFIER_alloc:
3623 Kind = OMPC_MAP_alloc;
3625 case OMPC_DEFAULTMAP_MODIFIER_to:
3628 case OMPC_DEFAULTMAP_MODIFIER_from:
3629 Kind = OMPC_MAP_from;
3631 case OMPC_DEFAULTMAP_MODIFIER_tofrom:
3632 Kind = OMPC_MAP_tofrom;
3634 case OMPC_DEFAULTMAP_MODIFIER_present:
3640 Kind = OMPC_MAP_alloc;
3642 case OMPC_DEFAULTMAP_MODIFIER_firstprivate:
3644 llvm_unreachable(
"Unexpected defaultmap implicit behavior");
3645 case OMPC_DEFAULTMAP_MODIFIER_none:
3646 case OMPC_DEFAULTMAP_MODIFIER_default:
3651 if (IsAggregateOrDeclareTarget) {
3652 Kind = OMPC_MAP_tofrom;
3655 llvm_unreachable(
"Unexpected defaultmap implicit behavior");
3662 class DSAAttrChecker final :
public StmtVisitor<DSAAttrChecker, void> {
3665 bool ErrorFound =
false;
3666 bool TryCaptureCXXThisMembers =
false;
3668 const static unsigned DefaultmapKindNum = OMPC_DEFAULTMAP_pointer + 1;
3673 ImplicitMapModifier[DefaultmapKindNum];
3675 llvm::SmallDenseSet<const ValueDecl *, 4> ImplicitDeclarations;
3679 if (!S->hasAssociatedStmt() || !S->getAssociatedStmt())
3681 if (S->getDirectiveKind() == OMPD_atomic ||
3682 S->getDirectiveKind() == OMPD_critical ||
3683 S->getDirectiveKind() == OMPD_section ||
3684 S->getDirectiveKind() == OMPD_master ||
3685 S->getDirectiveKind() == OMPD_masked ||
3687 Visit(S->getAssociatedStmt());
3690 visitSubCaptures(S->getInnermostCapturedStmt());
3693 if (TryCaptureCXXThisMembers ||
3695 llvm::any_of(S->getInnermostCapturedStmt()->captures(),
3697 return C.capturesThis();
3699 bool SavedTryCaptureCXXThisMembers = TryCaptureCXXThisMembers;
3700 TryCaptureCXXThisMembers =
true;
3701 Visit(S->getInnermostCapturedStmt()->getCapturedStmt());
3702 TryCaptureCXXThisMembers = SavedTryCaptureCXXThisMembers;
3709 if (
auto *FC = dyn_cast<OMPFirstprivateClause>(C)) {
3710 for (
Expr *Ref : FC->varlists())
3722 if (
auto *VD = dyn_cast<VarDecl>(E->
getDecl())) {
3725 !Stack->getTopDSA(VD,
false).RefExpr &&
3726 !Stack->isImplicitDefaultFirstprivateFD(VD))) {
3727 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(VD))
3728 if (!CED->hasAttr<OMPCaptureNoInitAttr>()) {
3729 Visit(CED->getInit());
3732 }
else if (VD->
isImplicit() || isa<OMPCapturedExprDecl>(VD))
3735 if (!Stack->isImplicitDefaultFirstprivateFD(VD))
3740 !Stack->isImplicitDefaultFirstprivateFD(VD) &&
3741 !Stack->isImplicitTaskFirstprivate(VD))
3744 if (Stack->isUsesAllocatorsDecl(VD))
3747 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(VD,
false);
3749 if (DVar.RefExpr || !ImplicitDeclarations.insert(VD).second)
3753 std::optional<OMPDeclareTargetDeclAttr::MapTypeTy> Res =
3754 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
3757 !Res || *Res != OMPDeclareTargetDeclAttr::MT_Link) &&
3758 !Stack->isImplicitDefaultFirstprivateFD(VD) &&
3759 !Stack->isImplicitTaskFirstprivate(VD))
3768 if (DVar.CKind == OMPC_unknown &&
3769 (Stack->getDefaultDSA() == DSA_none ||
3770 Stack->getDefaultDSA() == DSA_private ||
3771 Stack->getDefaultDSA() == DSA_firstprivate) &&
3772 isImplicitOrExplicitTaskingRegion(DKind) &&
3773 VarsWithInheritedDSA.count(VD) == 0) {
3774 bool InheritedDSA = Stack->getDefaultDSA() == DSA_none;
3775 if (!InheritedDSA && (Stack->getDefaultDSA() == DSA_firstprivate ||
3776 Stack->getDefaultDSA() == DSA_private)) {
3777 DSAStackTy::DSAVarData DVar =
3778 Stack->getImplicitDSA(VD,
false);
3779 InheritedDSA = DVar.CKind == OMPC_unknown;
3782 VarsWithInheritedDSA[VD] = E;
3783 if (Stack->getDefaultDSA() == DSA_none)
3798 bool IsModifierNone = Stack->getDefaultmapModifier(ClauseKind) ==
3799 OMPC_DEFAULTMAP_MODIFIER_none;
3800 if (DVar.CKind == OMPC_unknown && IsModifierNone &&
3801 VarsWithInheritedDSA.count(VD) == 0 && !Res) {
3805 if (!Stack->checkMappableExprComponentListsForDecl(
3810 auto MI = MapExprComponents.rbegin();
3811 auto ME = MapExprComponents.rend();
3812 return MI != ME && MI->getAssociatedDeclaration() == VD;
3814 VarsWithInheritedDSA[VD] = E;
3820 bool IsModifierPresent = Stack->getDefaultmapModifier(ClauseKind) ==
3821 OMPC_DEFAULTMAP_MODIFIER_present;
3822 if (IsModifierPresent) {
3823 if (!llvm::is_contained(ImplicitMapModifier[ClauseKind],
3824 OMPC_MAP_MODIFIER_present)) {
3825 ImplicitMapModifier[ClauseKind].push_back(
3826 OMPC_MAP_MODIFIER_present);
3832 !Stack->isLoopControlVariable(VD).first) {
3833 if (!Stack->checkMappableExprComponentListsForDecl(
3838 if (SemaRef.LangOpts.OpenMP >= 50)
3839 return !StackComponents.empty();
3842 return StackComponents.size() == 1 ||
3844 llvm::drop_begin(llvm::reverse(StackComponents)),
3845 [](const OMPClauseMappableExprCommon::
3846 MappableComponent &MC) {
3847 return MC.getAssociatedDeclaration() ==
3849 (isa<OMPArraySectionExpr>(
3850 MC.getAssociatedExpression()) ||
3851 isa<OMPArrayShapingExpr>(
3852 MC.getAssociatedExpression()) ||
3853 isa<ArraySubscriptExpr>(
3854 MC.getAssociatedExpression()));
3857 bool IsFirstprivate =
false;
3859 if (
const auto *RD =
3861 IsFirstprivate = RD->isLambda();
3863 IsFirstprivate || (Stack->mustBeFirstprivate(ClauseKind) && !Res);
3864 if (IsFirstprivate) {
3865 ImplicitFirstprivate.emplace_back(E);
3868 Stack->getDefaultmapModifier(ClauseKind);
3870 M, ClauseKind == OMPC_DEFAULTMAP_aggregate || Res);
3871 ImplicitMap[ClauseKind][
Kind].emplace_back(E);
3881 DVar = Stack->hasInnermostDSA(
3884 return C == OMPC_reduction && !AppliedToPointee;
3893 SemaRef.
Diag(ELoc, diag::err_omp_reduction_in_task);
3899 DVar = Stack->getImplicitDSA(VD,
false);
3901 (((Stack->getDefaultDSA() == DSA_firstprivate &&
3902 DVar.CKind == OMPC_firstprivate) ||
3903 (Stack->getDefaultDSA() == DSA_private &&
3904 DVar.CKind == OMPC_private)) &&
3906 !Stack->isLoopControlVariable(VD).first) {
3907 if (Stack->getDefaultDSA() == DSA_private)
3908 ImplicitPrivate.push_back(E);
3910 ImplicitFirstprivate.push_back(E);
3917 *Res == OMPDeclareTargetDeclAttr::MT_Link) {
3918 Stack->addToParentTargetRegionLinkGlobals(E);
3932 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(FD,
false);
3935 if (DVar.RefExpr || !ImplicitDeclarations.insert(FD).second)
3939 !Stack->isLoopControlVariable(FD).first &&
3940 !Stack->checkMappableExprComponentListsForDecl(
3945 return isa<CXXThisExpr>(
3947 StackComponents.back().getAssociatedExpression())
3959 if (Stack->isClassPreviouslyMapped(TE->getType()))
3963 Stack->getDefaultmapModifier(OMPC_DEFAULTMAP_aggregate);
3968 ImplicitMap[ClauseKind][
Kind].emplace_back(E);
3977 DVar = Stack->hasInnermostDSA(
3980 return C == OMPC_reduction && !AppliedToPointee;
3989 SemaRef.
Diag(ELoc, diag::err_omp_reduction_in_task);
3995 DVar = Stack->getImplicitDSA(FD,
false);
3997 !Stack->isLoopControlVariable(FD).first) {
4002 if (DVar.CKind != OMPC_unknown)
4003 ImplicitFirstprivate.push_back(E);
4010 Stack->getCurrentDirective(),
4013 const auto *VD = cast<ValueDecl>(
4014 CurComponents.back().getAssociatedDeclaration()->getCanonicalDecl());
4015 if (!Stack->checkMappableExprComponentListsForDecl(
4021 auto CCI = CurComponents.rbegin();
4022 auto CCE = CurComponents.rend();
4023 for (const auto &SC : llvm::reverse(StackComponents)) {
4025 if (CCI->getAssociatedExpression()->getStmtClass() !=
4026 SC.getAssociatedExpression()->getStmtClass())
4027 if (!((isa<OMPArraySectionExpr>(
4028 SC.getAssociatedExpression()) ||
4029 isa<OMPArrayShapingExpr>(
4030 SC.getAssociatedExpression())) &&
4031 isa<ArraySubscriptExpr>(
4032 CCI->getAssociatedExpression())))
4035 const Decl *CCD = CCI->getAssociatedDeclaration();
4036 const Decl *SCD = SC.getAssociatedDeclaration();
4037 CCD = CCD ? CCD->getCanonicalDecl() : nullptr;
4038 SCD = SCD ? SCD->getCanonicalDecl() : nullptr;
4041 std::advance(CCI, 1);
4049 }
else if (!TryCaptureCXXThisMembers) {
4057 if (isa_and_nonnull<OMPPrivateClause>(C))
4063 if (C && !((isa<OMPFirstprivateClause>(C) || isa<OMPMapClause>(C)) &&
4066 for (
Stmt *CC :
C->children()) {
4073 VisitSubCaptures(S);
4082 for (
Stmt *C : S->arguments()) {
4089 if (
Expr *Callee = S->getCallee()) {
4090 auto *CI =
Callee->IgnoreParenImpCasts();
4091 if (
auto *CE = dyn_cast<MemberExpr>(CI))
4092 Visit(CE->getBase());
4093 else if (
auto *CE = dyn_cast<DeclRefExpr>(CI))
4097 void VisitStmt(
Stmt *S) {
4098 for (
Stmt *C : S->children()) {
4109 if (!Cap.capturesVariable() && !Cap.capturesVariableByCopy())
4111 VarDecl *VD = Cap.getCapturedVar();
4115 Stack->checkMappableExprComponentListsForDecl(
4122 Cap.getLocation(),
true);
4126 bool isErrorFound()
const {
return ErrorFound; }
4128 return ImplicitFirstprivate;
4133 return ImplicitMap[DK][MK];
4137 return ImplicitMapModifier[
Kind];
4140 return VarsWithInheritedDSA;
4144 : Stack(S), SemaRef(SemaRef), ErrorFound(
false), CS(CS) {
4159 Traits.emplace_back(llvm::omp::TraitProperty::construct_target_target);
4161 Traits.emplace_back(llvm::omp::TraitProperty::construct_teams_teams);
4163 Traits.emplace_back(llvm::omp::TraitProperty::construct_parallel_parallel);
4165 Traits.emplace_back(llvm::omp::TraitProperty::construct_for_for);
4167 Traits.emplace_back(llvm::omp::TraitProperty::construct_simd_simd);
4168 Stack->handleConstructTrait(Traits, ScopeEntry);
4174 case OMPD_parallel_for:
4175 case OMPD_parallel_for_simd:
4176 case OMPD_parallel_sections:
4177 case OMPD_parallel_master:
4178 case OMPD_parallel_masked:
4179 case OMPD_parallel_loop:
4181 case OMPD_teams_distribute:
4182 case OMPD_teams_distribute_simd: {
4187 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4188 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4189 std::make_pair(StringRef(),
QualType())
4195 case OMPD_target_teams:
4196 case OMPD_target_parallel:
4197 case OMPD_target_parallel_for:
4198 case OMPD_target_parallel_for_simd:
4199 case OMPD_target_teams_loop:
4200 case OMPD_target_parallel_loop:
4201 case OMPD_target_teams_distribute:
4202 case OMPD_target_teams_distribute_simd: {
4212 std::make_pair(
".global_tid.", KmpInt32Ty),
4213 std::make_pair(
".part_id.", KmpInt32PtrTy),
4214 std::make_pair(
".privates.", VoidPtrTy),
4219 std::make_pair(StringRef(),
QualType())
4226 AlwaysInlineAttr::CreateImplicit(
4228 AlwaysInlineAttr::Keyword_forceinline));
4230 std::make_pair(StringRef(),
QualType())
4236 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4237 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4238 std::make_pair(StringRef(),
QualType())
4243 ParamsTeamsOrParallel, 2);
4247 case OMPD_target_simd: {
4257 std::make_pair(
".global_tid.", KmpInt32Ty),
4258 std::make_pair(
".part_id.", KmpInt32PtrTy),
4259 std::make_pair(
".privates.", VoidPtrTy),
4264 std::make_pair(StringRef(),
QualType())
4271 AlwaysInlineAttr::CreateImplicit(
4273 AlwaysInlineAttr::Keyword_forceinline));
4275 std::make_pair(StringRef(),
QualType()),
4295 case OMPD_taskgroup:
4296 case OMPD_distribute:
4297 case OMPD_distribute_simd:
4299 case OMPD_target_data:
4300 case OMPD_dispatch: {
4302 std::make_pair(StringRef(),
QualType())
4318 std::make_pair(
".global_tid.", KmpInt32Ty),
4319 std::make_pair(
".part_id.", KmpInt32PtrTy),
4320 std::make_pair(
".privates.", VoidPtrTy),
4325 std::make_pair(StringRef(),
QualType())
4332 AlwaysInlineAttr::CreateImplicit(
4334 AlwaysInlineAttr::Keyword_forceinline));
4338 case OMPD_taskloop_simd:
4339 case OMPD_master_taskloop:
4340 case OMPD_masked_taskloop:
4341 case OMPD_masked_taskloop_simd:
4342 case OMPD_master_taskloop_simd: {
4360 std::make_pair(
".global_tid.", KmpInt32Ty),
4361 std::make_pair(
".part_id.", KmpInt32PtrTy),
4362 std::make_pair(
".privates.", VoidPtrTy),
4367 std::make_pair(
".lb.", KmpUInt64Ty),
4368 std::make_pair(
".ub.", KmpUInt64Ty),
4369 std::make_pair(
".st.", KmpInt64Ty),
4370 std::make_pair(
".liter.", KmpInt32Ty),
4371 std::make_pair(
".reductions.", VoidPtrTy),
4372 std::make_pair(StringRef(),
QualType())
4379 AlwaysInlineAttr::CreateImplicit(
4381 AlwaysInlineAttr::Keyword_forceinline));
4384 case OMPD_parallel_masked_taskloop:
4385 case OMPD_parallel_masked_taskloop_simd:
4386 case OMPD_parallel_master_taskloop:
4387 case OMPD_parallel_master_taskloop_simd: {
4401 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4402 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4403 std::make_pair(StringRef(),
QualType())
4413 std::make_pair(
".global_tid.", KmpInt32Ty),
4414 std::make_pair(
".part_id.", KmpInt32PtrTy),
4415 std::make_pair(
".privates.", VoidPtrTy),
4420 std::make_pair(
".lb.", KmpUInt64Ty),
4421 std::make_pair(
".ub.", KmpUInt64Ty),
4422 std::make_pair(
".st.", KmpInt64Ty),
4423 std::make_pair(
".liter.", KmpInt32Ty),
4424 std::make_pair(
".reductions.", VoidPtrTy),
4425 std::make_pair(StringRef(),
QualType())
4432 AlwaysInlineAttr::CreateImplicit(
4434 AlwaysInlineAttr::Keyword_forceinline));
4437 case OMPD_distribute_parallel_for_simd:
4438 case OMPD_distribute_parallel_for: {
4443 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4444 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4447 std::make_pair(StringRef(),
QualType())
4453 case OMPD_target_teams_distribute_parallel_for:
4454 case OMPD_target_teams_distribute_parallel_for_simd: {
4465 std::make_pair(
".global_tid.", KmpInt32Ty),
4466 std::make_pair(
".part_id.", KmpInt32PtrTy),
4467 std::make_pair(
".privates.", VoidPtrTy),
4472 std::make_pair(StringRef(),
QualType())
4479 AlwaysInlineAttr::CreateImplicit(
4481 AlwaysInlineAttr::Keyword_forceinline));
4483 std::make_pair(StringRef(),
QualType())
4490 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4491 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4492 std::make_pair(StringRef(),
QualType())
4499 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4500 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4503 std::make_pair(StringRef(),
QualType())
4512 case OMPD_teams_loop: {
4518 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4519 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4520 std::make_pair(StringRef(),
QualType())
4528 case OMPD_teams_distribute_parallel_for:
4529 case OMPD_teams_distribute_parallel_for_simd: {
4535 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4536 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4537 std::make_pair(StringRef(),
QualType())
4544 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4545 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4548 std::make_pair(StringRef(),
QualType())
4556 case OMPD_target_update:
4557 case OMPD_target_enter_data:
4558 case OMPD_target_exit_data: {
4568 std::make_pair(
".global_tid.", KmpInt32Ty),
4569 std::make_pair(
".part_id.", KmpInt32PtrTy),
4570 std::make_pair(
".privates.", VoidPtrTy),
4575 std::make_pair(StringRef(),
QualType())
4582 AlwaysInlineAttr::CreateImplicit(
4584 AlwaysInlineAttr::Keyword_forceinline));
4587 case OMPD_threadprivate:
4589 case OMPD_taskyield:
4593 case OMPD_cancellation_point:
4598 case OMPD_declare_reduction:
4599 case OMPD_declare_mapper:
4600 case OMPD_declare_simd:
4601 case OMPD_declare_target:
4602 case OMPD_end_declare_target:
4604 case OMPD_declare_variant:
4605 case OMPD_begin_declare_variant:
4606 case OMPD_end_declare_variant:
4607 case OMPD_metadirective:
4608 llvm_unreachable(
"OpenMP Directive is not allowed");
4611 llvm_unreachable(
"Unknown OpenMP directive");
4617 int Sema::getNumberOfConstructScopes(
unsigned Level)
const {
4624 return CaptureRegions.size();
4628 Expr *CaptureExpr,
bool WithInit,
4630 bool AsExpression) {
4631 assert(CaptureExpr);
4637 Ty = C.getLValueReferenceType(Ty);
4639 Ty = C.getPointerType(Ty);
4651 CED->addAttr(OMPCaptureNoInitAttr::CreateImplicit(C));
4662 CD = cast<OMPCapturedExprDecl>(VD);
4701 class CaptureRegionUnwinderRAII {
4708 CaptureRegionUnwinderRAII(
Sema &S,
bool &ErrorFound,
4710 : S(S), ErrorFound(ErrorFound), DKind(DKind) {}
4711 ~CaptureRegionUnwinderRAII() {
4714 while (--ThisCaptureLevel >= 0)
4727 DSAStack->getCurrentDirective()))) {
4729 if (
const auto *RD =
Type.getCanonicalType()
4730 .getNonReferenceType()
4732 bool SavedForceCaptureByReferenceInTargetExecutable =
4733 DSAStack->isForceCaptureByReferenceInTargetExecutable();
4734 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4736 if (RD->isLambda()) {
4737 llvm::DenseMap<const ValueDecl *, FieldDecl *> Captures;
4739 RD->getCaptureFields(Captures, ThisCapture);
4742 VarDecl *VD = cast<VarDecl>(LC.getCapturedVar());
4747 }
else if (LC.getCaptureKind() ==
LCK_This) {
4755 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4756 SavedForceCaptureByReferenceInTargetExecutable);
4766 for (
const OMPClause *Clause : Clauses) {
4768 Ordered = cast<OMPOrderedClause>(Clause);
4770 Order = cast<OMPOrderClause>(Clause);
4771 if (Order->
getKind() != OMPC_ORDER_concurrent)
4774 if (Ordered && Order)
4778 if (Ordered && Order) {
4780 diag::err_omp_simple_clause_incompatible_with_ordered)
4781 << getOpenMPClauseName(OMPC_order)
4795 if (
DSAStack->getCurrentDirective() == OMPD_atomic ||
4796 DSAStack->getCurrentDirective() == OMPD_critical ||
4797 DSAStack->getCurrentDirective() == OMPD_section ||
4798 DSAStack->getCurrentDirective() == OMPD_master ||
4799 DSAStack->getCurrentDirective() == OMPD_masked)
4802 bool ErrorFound =
false;
4803 CaptureRegionUnwinderRAII CaptureRegionUnwinder(
4804 *
this, ErrorFound,
DSAStack->getCurrentDirective());
4805 if (!S.isUsable()) {
4820 DSAStack->getCurrentDirective() == OMPD_target) &&
4824 auto *IRC = cast<OMPInReductionClause>(Clause);
4825 for (
Expr *E : IRC->taskgroup_descriptors())
4837 if (
auto *E = cast_or_null<Expr>(VarRef)) {
4841 DSAStack->setForceVarCapturing(
false);
4843 DSAStack->getCurrentDirective())) {
4844 assert(CaptureRegions.empty() &&
4845 "No captured regions in loop transformation directives.");
4846 }
else if (CaptureRegions.size() > 1 ||
4847 CaptureRegions.back() != OMPD_unknown) {
4851 if (
Expr *E = C->getPostUpdateExpr())
4856 SC = cast<OMPScheduleClause>(Clause);
4858 OC = cast<OMPOrderedClause>(Clause);
4860 LCs.push_back(cast<OMPLinearClause>(Clause));
4871 OMPC_SCHEDULE_MODIFIER_nonmonotonic) &&
4876 diag::err_omp_simple_clause_incompatible_with_ordered)
4877 << getOpenMPClauseName(OMPC_schedule)
4879 OMPC_SCHEDULE_MODIFIER_nonmonotonic)
4890 Diag(C->getBeginLoc(), diag::err_omp_linear_ordered)
4899 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
4906 unsigned CompletedRegions = 0;
4911 if (ThisCaptureRegion != OMPD_unknown) {
4919 if (CaptureRegion == ThisCaptureRegion ||
4920 CaptureRegion == OMPD_unknown) {
4921 if (
auto *DS = cast_or_null<DeclStmt>(C->getPreInitStmt())) {
4922 for (
Decl *D : DS->decls())
4928 if (ThisCaptureRegion == OMPD_target) {
4932 if (
const auto *UAC = dyn_cast<OMPUsesAllocatorsClause>(C)) {
4933 for (
unsigned I = 0,
End = UAC->getNumberOfAllocators(); I <
End;
4943 if (ThisCaptureRegion == OMPD_parallel) {
4947 if (
auto *RC = dyn_cast<OMPReductionClause>(C)) {
4948 if (RC->getModifier() != OMPC_REDUCTION_inscan)
4950 for (
Expr *E : RC->copy_array_temps())
4953 if (
auto *AC = dyn_cast<OMPAlignedClause>(C)) {
4954 for (
Expr *E : AC->varlists())
4959 if (++CompletedRegions == CaptureRegions.size())
4970 if (CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_cancellation_point)
4973 if (CancelRegion == OMPD_parallel || CancelRegion == OMPD_for ||
4974 CancelRegion == OMPD_sections || CancelRegion == OMPD_taskgroup)
4977 SemaRef.
Diag(StartLoc, diag::err_omp_wrong_cancel_region)
4978 << getOpenMPDirectiveName(CancelRegion);
4988 if (Stack->getCurScope()) {
4991 bool NestingProhibited =
false;
4992 bool CloseNesting =
true;
4993 bool OrphanSeen =
false;
4996 ShouldBeInParallelRegion,
4997 ShouldBeInOrderedRegion,
4998 ShouldBeInTargetRegion,
4999 ShouldBeInTeamsRegion,
5000 ShouldBeInLoopSimdRegion,
5001 } Recommend = NoRecommend;
5002 if (SemaRef.
LangOpts.OpenMP >= 51 && Stack->isParentOrderConcurrent() &&
5003 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_loop &&
5004 CurrentRegion != OMPD_parallel &&
5006 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region_order)
5007 << getOpenMPDirectiveName(CurrentRegion);
5011 ((SemaRef.
LangOpts.OpenMP <= 45 && CurrentRegion != OMPD_ordered) ||
5012 (SemaRef.
LangOpts.OpenMP >= 50 && CurrentRegion != OMPD_ordered &&
5013 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_atomic &&
5014 CurrentRegion != OMPD_scan))) {
5027 SemaRef.
Diag(StartLoc, (CurrentRegion != OMPD_simd)
5028 ? diag::err_omp_prohibited_region_simd
5029 : diag::warn_omp_nesting_simd)
5030 << (SemaRef.
LangOpts.OpenMP >= 50 ? 1 : 0);
5031 return CurrentRegion != OMPD_simd;
5033 if (ParentRegion == OMPD_atomic) {
5036 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region_atomic);
5039 if (CurrentRegion == OMPD_section) {
5044 if (ParentRegion != OMPD_sections &&
5045 ParentRegion != OMPD_parallel_sections) {
5046 SemaRef.
Diag(StartLoc, diag::err_omp_orphaned_section_directive)
5047 << (ParentRegion != OMPD_unknown)
5048 << getOpenMPDirectiveName(ParentRegion);
5056 if (ParentRegion == OMPD_unknown &&
5058 CurrentRegion != OMPD_cancellation_point &&
5059 CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_scan)
5061 if (CurrentRegion == OMPD_cancellation_point ||
5062 CurrentRegion == OMPD_cancel) {
5075 !((CancelRegion == OMPD_parallel &&
5076 (ParentRegion == OMPD_parallel ||
5077 ParentRegion == OMPD_target_parallel)) ||
5078 (CancelRegion == OMPD_for &&
5079 (ParentRegion == OMPD_for || ParentRegion == OMPD_parallel_for ||
5080 ParentRegion == OMPD_target_parallel_for ||
5081 ParentRegion == OMPD_distribute_parallel_for ||
5082 ParentRegion == OMPD_teams_distribute_parallel_for ||
5083 ParentRegion == OMPD_target_teams_distribute_parallel_for)) ||
5084 (CancelRegion == OMPD_taskgroup &&
5085 (ParentRegion == OMPD_task ||
5087 (ParentRegion == OMPD_taskloop ||
5088 ParentRegion == OMPD_master_taskloop ||
5089 ParentRegion == OMPD_masked_taskloop ||
5090 ParentRegion == OMPD_parallel_masked_taskloop ||
5091 ParentRegion == OMPD_parallel_master_taskloop)))) ||
5092 (CancelRegion == OMPD_sections &&
5093 (ParentRegion == OMPD_section || ParentRegion == OMPD_sections ||
5094 ParentRegion == OMPD_parallel_sections)));
5095 OrphanSeen = ParentRegion == OMPD_unknown;
5096 }
else if (CurrentRegion == OMPD_master || CurrentRegion == OMPD_masked) {
5103 }
else if (CurrentRegion == OMPD_critical && CurrentName.
getName()) {
5109 bool DeadLock = Stack->hasDirective(
5113 if (K == OMPD_critical && DNI.
getName() == CurrentName.
getName()) {
5114 PreviousCriticalLoc = Loc;
5121 SemaRef.
Diag(StartLoc,
5122 diag::err_omp_prohibited_region_critical_same_name)
5124 if (PreviousCriticalLoc.
isValid())
5125 SemaRef.
Diag(PreviousCriticalLoc,
5126 diag::note_omp_previous_critical_region);
5129 }
else if (CurrentRegion == OMPD_barrier) {
5137 ParentRegion == OMPD_master || ParentRegion == OMPD_masked ||
5138 ParentRegion == OMPD_parallel_master ||
5139 ParentRegion == OMPD_parallel_masked ||
5140 ParentRegion == OMPD_critical || ParentRegion == OMPD_ordered;
5152 ParentRegion == OMPD_master || ParentRegion == OMPD_masked ||
5153 ParentRegion == OMPD_parallel_master ||
5154 ParentRegion == OMPD_parallel_masked ||
5155 ParentRegion == OMPD_critical || ParentRegion == OMPD_ordered;
5156 Recommend = ShouldBeInParallelRegion;
5157 }
else if (CurrentRegion == OMPD_ordered) {
5166 NestingProhibited = ParentRegion == OMPD_critical ||
5169 Stack->isParentOrderedRegion());
5170 Recommend = ShouldBeInOrderedRegion;
5176 (SemaRef.
LangOpts.OpenMP <= 45 && ParentRegion != OMPD_target) ||
5177 (SemaRef.
LangOpts.OpenMP >= 50 && ParentRegion != OMPD_unknown &&
5178 ParentRegion != OMPD_target);
5179 OrphanSeen = ParentRegion == OMPD_unknown;
5180 Recommend = ShouldBeInTargetRegion;
5181 }
else if (CurrentRegion == OMPD_scan) {
5187 (ParentRegion != OMPD_simd && ParentRegion != OMPD_for &&
5188 ParentRegion != OMPD_for_simd && ParentRegion != OMPD_parallel_for &&
5189 ParentRegion != OMPD_parallel_for_simd);
5190 OrphanSeen = ParentRegion == OMPD_unknown;
5191 Recommend = ShouldBeInLoopSimdRegion;
5193 if (!NestingProhibited &&
5196 (ParentRegion == OMPD_teams || ParentRegion == OMPD_target_teams)) {
5208 CurrentRegion != OMPD_loop &&
5210 CurrentRegion == OMPD_atomic);
5211 Recommend = ShouldBeInParallelRegion;
5213 if (!NestingProhibited && CurrentRegion == OMPD_loop) {
5218 NestingProhibited = BindKind == OMPC_BIND_teams &&
5219 ParentRegion != OMPD_teams &&
5220 ParentRegion != OMPD_target_teams;
5221 Recommend = ShouldBeInTeamsRegion;
5223 if (!NestingProhibited &&
5229 (ParentRegion != OMPD_teams && ParentRegion != OMPD_target_teams);
5230 Recommend = ShouldBeInTeamsRegion;
5232 if (!NestingProhibited &&
5239 NestingProhibited = Stack->hasDirective(
5243 OffendingRegion = K;
5249 CloseNesting =
false;
5251 if (NestingProhibited) {
5253 SemaRef.
Diag(StartLoc, diag::err_omp_orphaned_device_directive)
5254 << getOpenMPDirectiveName(CurrentRegion) << Recommend;
5256 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region)
5257 << CloseNesting << getOpenMPDirectiveName(OffendingRegion)
5258 << Recommend << getOpenMPDirectiveName(CurrentRegion);
5273 bool ErrorFound =
false;
5274 unsigned NamedModifiersNumber = 0;
5275 llvm::IndexedMap<const OMPIfClause *, Kind2Unsigned> FoundNameModifiers;
5276 FoundNameModifiers.resize(llvm::omp::Directive_enumSize + 1);
5279 if (
const auto *IC = dyn_cast_or_null<OMPIfClause>(C)) {
5283 if (FoundNameModifiers[CurNM]) {
5284 S.
Diag(C->getBeginLoc(), diag::err_omp_more_one_clause)
5285 << getOpenMPDirectiveName(
Kind) << getOpenMPClauseName(OMPC_if)
5286 << (CurNM != OMPD_unknown) << getOpenMPDirectiveName(CurNM);
5288 }
else if (CurNM != OMPD_unknown) {
5289 NameModifierLoc.push_back(IC->getNameModifierLoc());
5290 ++NamedModifiersNumber;
5292 FoundNameModifiers[CurNM] = IC;
5293 if (CurNM == OMPD_unknown)
5299 if (!llvm::is_contained(AllowedNameModifiers, CurNM)) {
5300 S.
Diag(IC->getNameModifierLoc(),
5301 diag::err_omp_wrong_if_directive_name_modifier)
5302 << getOpenMPDirectiveName(CurNM) << getOpenMPDirectiveName(
Kind);
5309 if (FoundNameModifiers[OMPD_unknown] && NamedModifiersNumber > 0) {
5310 if (NamedModifiersNumber == AllowedNameModifiers.size()) {
5311 S.
Diag(FoundNameModifiers[OMPD_unknown]->getBeginLoc(),
5312 diag::err_omp_no_more_if_clause);
5316 unsigned AllowedCnt = 0;
5317 unsigned TotalAllowedNum =
5318 AllowedNameModifiers.size() - NamedModifiersNumber;
5319 for (
unsigned Cnt = 0,
End = AllowedNameModifiers.size(); Cnt <
End;
5322 if (!FoundNameModifiers[NM]) {
5324 Values += getOpenMPDirectiveName(NM);
5326 if (AllowedCnt + 2 == TotalAllowedNum)
5328 else if (AllowedCnt + 1 != TotalAllowedNum)
5333 S.
Diag(FoundNameModifiers[OMPD_unknown]->getCondition()->getBeginLoc(),
5334 diag::err_omp_unnamed_if_clause)
5335 << (TotalAllowedNum > 1) << Values;
5338 S.
Diag(Loc, diag::note_omp_previous_named_if_clause);
5348 bool AllowArraySection,
5349 StringRef DiagType) {
5352 return std::make_pair(
nullptr,
true);
5364 } IsArrayExpr = NoArrayExpr;
5365 if (AllowArraySection) {
5366 if (
auto *ASE = dyn_cast_or_null<ArraySubscriptExpr>(RefExpr)) {
5367 Expr *
Base = ASE->getBase()->IgnoreParenImpCasts();
5368 while (
auto *TempASE = dyn_cast<ArraySubscriptExpr>(
Base))
5369 Base = TempASE->getBase()->IgnoreParenImpCasts();
5371 IsArrayExpr = ArraySubscript;
5372 }
else if (
auto *OASE = dyn_cast_or_null<OMPArraySectionExpr>(RefExpr)) {
5373 Expr *
Base = OASE->getBase()->IgnoreParenImpCasts();
5374 while (
auto *TempOASE = dyn_cast<OMPArraySectionExpr>(
Base))
5375 Base = TempOASE->getBase()->IgnoreParenImpCasts();
5376 while (
auto *TempASE = dyn_cast<ArraySubscriptExpr>(
Base))
5377 Base = TempASE->getBase()->IgnoreParenImpCasts();
5379 IsArrayExpr = OMPArraySection;
5385 auto *DE = dyn_cast_or_null<DeclRefExpr>(RefExpr);
5386 auto *ME = dyn_cast_or_null<MemberExpr>(RefExpr);
5387 if ((!DE || !isa<VarDecl>(DE->getDecl())) &&
5389 !isa<CXXThisExpr>(ME->getBase()->IgnoreParenImpCasts()) ||
5390 !isa<FieldDecl>(ME->getMemberDecl()))) {
5391 if (IsArrayExpr != NoArrayExpr) {
5392 S.
Diag(ELoc, diag::err_omp_expected_base_var_name)
5393 << IsArrayExpr << ERange;
5394 }
else if (!DiagType.empty()) {
5398 S.
Diag(ELoc, diag::err_omp_expected_var_name_member_expr_with_type)
5399 << DiagSelect << DiagType << ERange;
5403 ? diag::err_omp_expected_var_name_member_expr_or_array_item
5404 : diag::err_omp_expected_var_name_member_expr)
5407 return std::make_pair(
nullptr,
false);
5409 return std::make_pair(
5416 class AllocatorChecker final :
public ConstStmtVisitor<AllocatorChecker, bool> {
5417 DSAStackTy *S =
nullptr;
5421 return S->isUsesAllocatorsDecl(E->
getDecl())
5422 .value_or(DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait) ==
5423 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait;
5425 bool VisitStmt(
const Stmt *S) {
5426 for (
const Stmt *Child : S->children()) {
5427 if (Child && Visit(Child))
5432 explicit AllocatorChecker(DSAStackTy *S) : S(S) {}
5439 "Expected non-dependent context.");
5440 auto AllocateRange =
5443 auto PrivateRange = llvm::make_filter_range(Clauses, [](
const OMPClause *C) {
5448 if (
Cl->getClauseKind() == OMPC_private) {
5449 auto *PC = cast<OMPPrivateClause>(
Cl);
5450 I = PC->private_copies().begin();
5451 It = PC->varlist_begin();
5452 Et = PC->varlist_end();
5453 }
else if (
Cl->getClauseKind() == OMPC_firstprivate) {
5454 auto *PC = cast<OMPFirstprivateClause>(
Cl);
5455 I = PC->private_copies().begin();
5456 It = PC->varlist_begin();
5457 Et = PC->varlist_end();
5458 }
else if (
Cl->getClauseKind() == OMPC_lastprivate) {
5459 auto *PC = cast<OMPLastprivateClause>(
Cl);
5460 I = PC->private_copies().begin();
5461 It = PC->varlist_begin();
5462 Et = PC->varlist_end();
5463 }
else if (
Cl->getClauseKind() == OMPC_linear) {
5464 auto *PC = cast<OMPLinearClause>(
Cl);
5465 I = PC->privates().begin();
5466 It = PC->varlist_begin();
5467 Et = PC->varlist_end();
5468 }
else if (
Cl->getClauseKind() == OMPC_reduction) {
5469 auto *PC = cast<OMPReductionClause>(
Cl);
5470 I = PC->privates().begin();
5471 It = PC->varlist_begin();
5472 Et = PC->varlist_end();
5473 }
else if (
Cl->getClauseKind() == OMPC_task_reduction) {
5474 auto *PC = cast<OMPTaskReductionClause>(
Cl);
5475 I = PC->privates().begin();
5476 It = PC->varlist_begin();
5477 Et = PC->varlist_end();
5478 }
else if (
Cl->getClauseKind() == OMPC_in_reduction) {
5479 auto *PC = cast<OMPInReductionClause>(
Cl);
5480 I = PC->privates().begin();
5481 It = PC->varlist_begin();
5482 Et = PC->varlist_end();
5484 llvm_unreachable(
"Expected private clause.");
5486 for (
Expr *E : llvm::make_range(It, Et)) {
5493 Expr *SimpleRefExpr = E;
5496 DeclToCopy.try_emplace(Res.first,
5497 cast<VarDecl>(cast<DeclRefExpr>(*I)->getDecl()));
5502 auto *AC = cast<OMPAllocateClause>(C);
5506 AC->getAllocator()) {
5507 Expr *Allocator = AC->getAllocator();
5513 AllocatorChecker Checker(Stack);
5514 if (Checker.Visit(Allocator))
5515 S.
Diag(Allocator->getExprLoc(),
5516 diag::err_omp_allocator_not_in_uses_allocators)
5517 << Allocator->getSourceRange();
5519 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind =
5525 if (AllocatorKind == OMPAllocateDeclAttr::OMPThreadMemAlloc &&
5528 S.
Diag(AC->getAllocator()->getExprLoc(),
5529 diag::warn_omp_allocate_thread_on_task_target_directive)
5530 << getOpenMPDirectiveName(Stack->getCurrentDirective());
5532 for (
Expr *E : AC->varlists()) {
5535 Expr *SimpleRefExpr = E;
5538 DSAStackTy::DSAVarData
Data = Stack->getTopDSA(VD,
false);
5541 diag::err_omp_expected_private_copy_for_allocate);
5544 VarDecl *PrivateVD = DeclToCopy[VD];
5546 AllocatorKind, AC->getAllocator()))
5549 Expr *Alignment =
nullptr;
5566 CaptureVars(
Sema &Actions) : BaseTransform(Actions) {}
5568 bool AlwaysRebuild() {
return true; }
5581 BodyStmts.push_back(NewDeclStmt);
5619 DistParam, LogicalTy,
VK_LValue, {},
nullptr,
nullptr, {},
nullptr);
5630 auto BuildVarRef = [&](
VarDecl *VD) {
5643 Actions.
BuildBinOp(
nullptr, {}, BO_LT, BuildVarRef(NewStep),
Zero));
5647 nullptr, {}, BO_Sub, BuildVarRef(NewStop), BuildVarRef(NewStart)));
5651 nullptr, {}, BO_Div, ForwardRange, BuildVarRef(NewStep)));
5655 nullptr, {}, BO_Sub, BuildVarRef(NewStart), BuildVarRef(NewStop)));
5659 Actions.
BuildUnaryOp(
nullptr, {}, UO_Minus, BuildVarRef(NewStep)));
5661 Actions.
BuildBinOp(
nullptr, {}, BO_Div, BackwardRange, NegIncAmount));
5665 {}, {}, IsNegStep, BackwardDist, ForwardDist));
5667 assert((Rel == BO_LT || Rel == BO_LE || Rel == BO_GE || Rel == BO_GT) &&
5668 "Expected one of these relational operators");
5675 nullptr, {}, Rel, BuildVarRef(NewStart), BuildVarRef(NewStop)));
5679 if (Rel == BO_GE || Rel == BO_GT)
5681 nullptr, {}, BO_Sub, BuildVarRef(NewStart), BuildVarRef(NewStop)));
5684 nullptr, {}, BO_Sub, BuildVarRef(NewStop), BuildVarRef(NewStart)));
5690 if (Rel == BO_LE || Rel == BO_GE) {
5701 Expr *Divisor = BuildVarRef(NewStep);
5702 if (Rel == BO_GE || Rel == BO_GT)
5705 Expr *DivisorMinusOne =
5708 Actions.
BuildBinOp(
nullptr, {}, BO_Add, Range, DivisorMinusOne));
5710 Actions.
BuildBinOp(
nullptr, {}, BO_Div, RangeRoundUp, Divisor));
5720 Actions.
getCurScope(), {}, BO_Assign, DistRef, Dist));
5721 BodyStmts.push_back(ResultAssign);
5726 return cast<CapturedStmt>(
5753 {
"Logical", LogicalTy},
5764 assert(!Invalid &&
"Expecting capture-by-value to work.");
5769 auto *CS = cast<CapturedDecl>(Actions.
CurContext);
5773 TargetParam, LoopVarTy,
VK_LValue, {},
nullptr,
nullptr, {},
nullptr);
5776 IndvarParam, LogicalTy,
VK_LValue, {},
nullptr,
nullptr, {},
nullptr);
5779 CaptureVars Recap(Actions);
5784 Actions.
BuildBinOp(
nullptr, {}, BO_Mul, NewStep, LogicalRef));
5799 BO_Assign, TargetRef, Advanced));
5801 return cast<CapturedStmt>(
5812 if (
auto *For = dyn_cast<ForStmt>(AStmt)) {
5813 Stmt *Init = For->getInit();
5814 if (
auto *LCVarDeclStmt = dyn_cast<DeclStmt>(Init)) {
5816 LIVDecl = cast<VarDecl>(LCVarDeclStmt->getSingleDecl());
5817 }
else if (
auto *LCAssign = dyn_cast<BinaryOperator>(Init)) {
5819 assert(LCAssign->getOpcode() == BO_Assign &&
5820 "init part must be a loop variable assignment");
5821 auto *CounterRef = cast<DeclRefExpr>(LCAssign->getLHS());
5822 LIVDecl = cast<VarDecl>(CounterRef->getDecl());
5824 llvm_unreachable(
"Cannot determine loop variable");
5827 Cond = For->getCond();
5828 Inc = For->getInc();
5829 }
else if (
auto *RangeFor = dyn_cast<CXXForRangeStmt>(AStmt)) {
5830 DeclStmt *BeginStmt = RangeFor->getBeginStmt();
5832 LUVDecl = RangeFor->getLoopVariable();
5834 Cond = RangeFor->getCond();
5835 Inc = RangeFor->getInc();
5837 llvm_unreachable(
"unhandled kind of loop");
5846 if (
auto *CondBinExpr = dyn_cast<BinaryOperator>(Cond)) {
5847 LHS = CondBinExpr->getLHS();
5848 RHS = CondBinExpr->getRHS();
5849 CondRel = CondBinExpr->getOpcode();
5850 }
else if (
auto *CondCXXOp = dyn_cast<CXXOperatorCallExpr>(Cond)) {
5851 assert(CondCXXOp->getNumArgs() == 2 &&
"Comparison should have 2 operands");
5852 LHS = CondCXXOp->getArg(0);
5853 RHS = CondCXXOp->getArg(1);
5854 switch (CondCXXOp->getOperator()) {
5855 case OO_ExclaimEqual:
5867 case OO_GreaterEqual:
5871 llvm_unreachable(
"unexpected iterator operator");
5874 llvm_unreachable(
"unexpected loop condition");
5878 cast<DeclRefExpr>(LHS->
IgnoreImplicit())->getDecl() != LIVDecl) {
5879 std::swap(LHS, RHS);
5896 if (
auto *IncUn = dyn_cast<UnaryOperator>(Inc)) {
5898 switch (IncUn->getOpcode()) {
5908 llvm_unreachable(
"unhandled unary increment operator");
5912 }
else if (
auto *IncBin = dyn_cast<BinaryOperator>(Inc)) {
5913 if (IncBin->getOpcode() == BO_AddAssign) {
5914 Step = IncBin->getRHS();
5915 }
else if (IncBin->getOpcode() == BO_SubAssign) {
5919 llvm_unreachable(
"unhandled binary increment operator");
5920 }
else if (
auto *CondCXXOp = dyn_cast<CXXOperatorCallExpr>(Inc)) {
5921 switch (CondCXXOp->getOperator()) {
5931 Step = CondCXXOp->getArg(1);
5935 BuildUnaryOp(
nullptr, {}, UO_Minus, CondCXXOp->getArg(1)));
5938 llvm_unreachable(
"unhandled overloaded increment operator");
5941 llvm_unreachable(
"unknown increment expression");
5946 *
this, LVTy, LogicalTy, CounterRef, Step, isa<CXXForRangeStmt>(AStmt));
5948 {},
nullptr,
nullptr, {},
nullptr);
5950 LoopVarFunc, LVRef);
5955 if (isa<ForStmt>(AStmt) || isa<CXXForRangeStmt>(AStmt))
5962 "Loop transformation directive expected");
5963 return LoopTransform;
5970 Expr *UnresolvedMapper);
5982 for (
int Cnt = 0, EndCnt = Clauses.size(); Cnt < EndCnt; ++Cnt) {
5983 auto *C = dyn_cast<OMPMapClause>(Clauses[Cnt]);
5987 auto *MI = C->mapperlist_begin();
5988 for (
auto I = C->varlist_begin(),
End = C->varlist_end(); I !=
End;
6008 ElemType = ATy->getElementType();
6011 CanonType = ElemType;
6016 1, {CanonType,
nullptr});
6017 llvm::DenseMap<const Type *, Expr *> Visited;
6020 while (!Types.empty()) {
6023 std::tie(BaseType, CurFD) = Types.pop_back_val();
6024 while (ParentChain.back().second == 0)
6025 ParentChain.pop_back();
6026 --ParentChain.back().second;
6033 auto It = Visited.find(BaseType.
getTypePtr());
6034 if (It == Visited.end()) {
6042 S, Stack->getCurScope(), MapperIdScopeSpec, DefaultMapperId,
6046 It = Visited.try_emplace(BaseType.
getTypePtr(), ER.
get()).first;
6053 Expr *BaseExpr = OE;
6054 for (
const auto &
P : ParentChain) {
6072 SubExprs.push_back(BaseExpr);
6076 bool FirstIter =
true;
6086 ParentChain.emplace_back(CurFD, 1);
6088 ++ParentChain.back().second;
6090 Types.emplace_back(FieldTy, FD);
6094 if (SubExprs.empty())
6099 nullptr, C->getMapTypeModifiers(), C->getMapTypeModifiersLoc(),
6100 MapperIdScopeSpec, MapperId, C->getMapType(),
6103 Clauses.push_back(NewClause);
6114 OMPExecutableDirective::getSingleClause<OMPBindClause>(Clauses))
6115 BindKind = BC->getBindKind();
6119 BindKind, StartLoc))
6124 bool ErrorFound =
false;
6125 ClausesWithImplicit.append(Clauses.begin(), Clauses.end());
6127 Kind != OMPD_critical &&
Kind != OMPD_section &&
Kind != OMPD_master &&
6129 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
6132 DSAAttrChecker DSAChecker(
DSAStack, *
this, cast<CapturedStmt>(AStmt));
6135 while (--ThisCaptureLevel >= 0)
6136 S = cast<CapturedStmt>(S)->getCapturedStmt();
6137 DSAChecker.Visit(S);
6141 auto *CS = cast<CapturedStmt>(AStmt);
6145 if (CaptureRegions.size() > 1 && CaptureRegions.front() == OMPD_task)
6147 DSAChecker.visitSubCaptures(CS);
6149 if (DSAChecker.isErrorFound())
6152 VarsWithInheritedDSA = DSAChecker.getVarsWithInheritedDSA();
6155 DSAChecker.getImplicitFirstprivate().begin(),
6156 DSAChecker.getImplicitFirstprivate().end());
6158 DSAChecker.getImplicitPrivate().begin(),
6159 DSAChecker.getImplicitPrivate().end());
6160 const unsigned DefaultmapKindNum = OMPC_DEFAULTMAP_pointer + 1;
6163 ImplicitMapModifiers[DefaultmapKindNum];
6165 ImplicitMapModifiersLoc[DefaultmapKindNum];
6169 if (
auto *DMC = dyn_cast<OMPDefaultmapClause>(C))
6170 if (DMC->getDefaultmapModifier() == OMPC_DEFAULTMAP_MODIFIER_present)
6171 PresentModifierLocs[DMC->getDefaultmapKind()] =
6172 DMC->getDefaultmapModifierLoc();
6174 for (
unsigned VC = 0; VC < DefaultmapKindNum; ++VC) {
6176 for (
unsigned I = 0; I < OMPC_MAP_delete; ++I) {
6179 ImplicitMaps[VC][I].append(ImplicitMap.begin(), ImplicitMap.end());
6182 DSAChecker.getImplicitMapModifier(
Kind);
6183 ImplicitMapModifiers[VC].append(ImplicitModifier.begin(),
6184 ImplicitModifier.end());
6185 std::fill_n(std::back_inserter(ImplicitMapModifiersLoc[VC]),
6186 ImplicitModifier.size(), PresentModifierLocs[VC]);
6190 if (
auto *IRC = dyn_cast<OMPInReductionClause>(C)) {
6191 for (
Expr *E : IRC->taskgroup_descriptors())
6193 ImplicitFirstprivates.emplace_back(E);
6198 if (
auto *DC = dyn_cast<OMPDetachClause>(C))
6199 ImplicitFirstprivates.push_back(DC->getEventHandler());
6201 if (!ImplicitFirstprivates.empty()) {
6205 ClausesWithImplicit.push_back(Implicit);
6206 ErrorFound = cast<OMPFirstprivateClause>(Implicit)->varlist_size() !=
6207 ImplicitFirstprivates.size();
6212 if (!ImplicitPrivates.empty()) {
6216 ClausesWithImplicit.push_back(Implicit);
6217 ErrorFound = cast<OMPPrivateClause>(Implicit)->varlist_size() !=
6218 ImplicitPrivates.size();
6231 if (
auto *RC = dyn_cast<OMPReductionClause>(C))
6232 for (
Expr *E : RC->varlists())
6234 ImplicitExprs.emplace_back(E);
6236 if (!ImplicitExprs.empty()) {
6242 MapperIdScopeSpec, MapperId, OMPC_MAP_tofrom,
6245 ClausesWithImplicit.emplace_back(Implicit);
6248 for (
unsigned I = 0, E = DefaultmapKindNum; I < E; ++I) {
6249 int ClauseKindCnt = -1;
6252 if (ImplicitMap.empty())
6258 nullptr, ImplicitMapModifiers[I], ImplicitMapModifiersLoc[I],
6259 MapperIdScopeSpec, MapperId,
Kind,
true,
6262 ClausesWithImplicit.emplace_back(Implicit);
6263 ErrorFound |= cast<OMPMapClause>(Implicit)->varlist_size() !=
6274 ClausesWithImplicit);
6282 AllowedNameModifiers.push_back(OMPD_parallel);
6286 VarsWithInheritedDSA);
6288 AllowedNameModifiers.push_back(OMPD_simd);
6300 VarsWithInheritedDSA);
6304 EndLoc, VarsWithInheritedDSA);
6306 AllowedNameModifiers.push_back(OMPD_simd);
6313 assert(ClausesWithImplicit.empty() &&
6314 "No clauses are allowed for 'omp section' directive");
6322 assert(ClausesWithImplicit.empty() &&
6323 "No clauses are allowed for 'omp master' directive");
6334 case OMPD_parallel_for:
6336 EndLoc, VarsWithInheritedDSA);
6337 AllowedNameModifiers.push_back(OMPD_parallel);
6339 case OMPD_parallel_for_simd:
6341 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6342 AllowedNameModifiers.push_back(OMPD_parallel);
6344 AllowedNameModifiers.push_back(OMPD_simd);
6346 case OMPD_parallel_master:
6349 AllowedNameModifiers.push_back(OMPD_parallel);
6351 case OMPD_parallel_masked:
6354 AllowedNameModifiers.push_back(OMPD_parallel);
6356 case OMPD_parallel_sections:
6359 AllowedNameModifiers.push_back(OMPD_parallel);
6364 AllowedNameModifiers.push_back(OMPD_task);
6366 case OMPD_taskyield:
6367 assert(ClausesWithImplicit.empty() &&
6368 "No clauses are allowed for 'omp taskyield' directive");
6369 assert(AStmt ==
nullptr &&
6370 "No associated statement allowed for 'omp taskyield' directive");
6374 assert(AStmt ==
nullptr &&
6375 "No associated statement allowed for 'omp error' directive");
6379 assert(ClausesWithImplicit.empty() &&
6380 "No clauses are allowed for 'omp barrier' directive");
6381 assert(AStmt ==
nullptr &&
6382 "No associated statement allowed for 'omp barrier' directive");
6386 assert(AStmt ==
nullptr &&
6387 "No associated statement allowed for 'omp taskwait' directive");
6390 case OMPD_taskgroup:
6395 assert(AStmt ==
nullptr &&
6396 "No associated statement allowed for 'omp flush' directive");
6400 assert(AStmt ==
nullptr &&
6401 "No associated statement allowed for 'omp depobj' directive");
6405 assert(AStmt ==
nullptr &&
6406 "No associated statement allowed for 'omp scan' directive");
6424 AllowedNameModifiers.push_back(OMPD_target);
6426 case OMPD_target_parallel:
6429 AllowedNameModifiers.push_back(OMPD_target);
6430 AllowedNameModifiers.push_back(OMPD_parallel);
6432 case OMPD_target_parallel_for:
6434 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6435 AllowedNameModifiers.push_back(OMPD_target);
6436 AllowedNameModifiers.push_back(OMPD_parallel);
6438 case OMPD_cancellation_point:
6439 assert(ClausesWithImplicit.empty() &&
6440 "No clauses are allowed for 'omp cancellation point' directive");
6441 assert(AStmt ==
nullptr &&
"No associated statement allowed for 'omp "
6442 "cancellation point' directive");
6446 assert(AStmt ==
nullptr &&
6447 "No associated statement allowed for 'omp cancel' directive");
6450 AllowedNameModifiers.push_back(OMPD_cancel);
6452 case OMPD_target_data:
6455 AllowedNameModifiers.push_back(OMPD_target_data);
6457 case OMPD_target_enter_data:
6460 AllowedNameModifiers.push_back(OMPD_target_enter_data);
6462 case OMPD_target_exit_data:
6465 AllowedNameModifiers.push_back(OMPD_target_exit_data);
6469 EndLoc, VarsWithInheritedDSA);
6470 AllowedNameModifiers.push_back(OMPD_taskloop);
6472 case OMPD_taskloop_simd:
6474 EndLoc, VarsWithInheritedDSA);
6475 AllowedNameModifiers.push_back(OMPD_taskloop);
6477 AllowedNameModifiers.push_back(OMPD_simd);
6479 case OMPD_master_taskloop:
6481 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6482 AllowedNameModifiers.push_back(OMPD_taskloop);
6484 case OMPD_masked_taskloop:
6486 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6487 AllowedNameModifiers.push_back(OMPD_taskloop);
6489 case OMPD_master_taskloop_simd:
6491 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6492 AllowedNameModifiers.push_back(OMPD_taskloop);
6494 AllowedNameModifiers.push_back(OMPD_simd);
6496 case OMPD_masked_taskloop_simd:
6498 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6500 AllowedNameModifiers.push_back(OMPD_taskloop);
6501 AllowedNameModifiers.push_back(OMPD_simd);
6504 case OMPD_parallel_master_taskloop:
6506 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6507 AllowedNameModifiers.push_back(OMPD_taskloop);
6508 AllowedNameModifiers.push_back(OMPD_parallel);
6510 case OMPD_parallel_masked_taskloop:
6512 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6514 AllowedNameModifiers.push_back(OMPD_taskloop);
6515 AllowedNameModifiers.push_back(OMPD_parallel);
6518 case OMPD_parallel_master_taskloop_simd:
6520 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6521 AllowedNameModifiers.push_back(OMPD_taskloop);
6522 AllowedNameModifiers.push_back(OMPD_parallel);
6524 AllowedNameModifiers.push_back(OMPD_simd);
6526 case OMPD_parallel_masked_taskloop_simd:
6528 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6530 AllowedNameModifiers.push_back(OMPD_taskloop);
6531 AllowedNameModifiers.push_back(OMPD_parallel);
6532 AllowedNameModifiers.push_back(OMPD_simd);
6535 case OMPD_distribute:
6537 EndLoc, VarsWithInheritedDSA);
6539 case OMPD_target_update:
6542 AllowedNameModifiers.push_back(OMPD_target_update);
6544 case OMPD_distribute_parallel_for:
6546 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6547 AllowedNameModifiers.push_back(OMPD_parallel);
6549 case OMPD_distribute_parallel_for_simd:
6551 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6552 AllowedNameModifiers.push_back(OMPD_parallel);
6554 AllowedNameModifiers.push_back(OMPD_simd);
6556 case OMPD_distribute_simd:
6558 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6560 AllowedNameModifiers.push_back(OMPD_simd);
6562 case OMPD_target_parallel_for_simd:
6564 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6565 AllowedNameModifiers.push_back(OMPD_target);
6566 AllowedNameModifiers.push_back(OMPD_parallel);
6568 AllowedNameModifiers.push_back(OMPD_simd);
6570 case OMPD_target_simd:
6572 EndLoc, VarsWithInheritedDSA);
6573 AllowedNameModifiers.push_back(OMPD_target);
6575 AllowedNameModifiers.push_back(OMPD_simd);
6577 case OMPD_teams_distribute:
6579 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6581 case OMPD_teams_distribute_simd:
6583 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6585 AllowedNameModifiers.push_back(OMPD_simd);
6587 case OMPD_teams_distribute_parallel_for_simd:
6589 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6590 AllowedNameModifiers.push_back(OMPD_parallel);
6592 AllowedNameModifiers.push_back(OMPD_simd);
6594 case OMPD_teams_distribute_parallel_for:
6596 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6597 AllowedNameModifiers.push_back(OMPD_parallel);
6599 case OMPD_target_teams:
6602 AllowedNameModifiers.push_back(OMPD_target);
6604 case OMPD_target_teams_distribute:
6606 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6607 AllowedNameModifiers.push_back(OMPD_target);
6609 case OMPD_target_teams_distribute_parallel_for:
6611 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6612 AllowedNameModifiers.push_back(OMPD_target);
6613 AllowedNameModifiers.push_back(OMPD_parallel);
6615 case OMPD_target_teams_distribute_parallel_for_simd:
6617 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6618 AllowedNameModifiers.push_back(OMPD_target);
6619 AllowedNameModifiers.push_back(OMPD_parallel);
6621 AllowedNameModifiers.push_back(OMPD_simd);
6623 case OMPD_target_teams_distribute_simd:
6625 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6626 AllowedNameModifiers.push_back(OMPD_target);
6628 AllowedNameModifiers.push_back(OMPD_simd);
6631 assert(AStmt ==
nullptr &&
6632 "No associated statement allowed for 'omp interop' directive");
6641 EndLoc, VarsWithInheritedDSA);
6643 case OMPD_teams_loop:
6645 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6647 case OMPD_target_teams_loop:
6649 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6651 case OMPD_parallel_loop:
6653 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6655 case OMPD_target_parallel_loop:
6657 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6659 case OMPD_declare_target:
6660 case OMPD_end_declare_target:
6661 case OMPD_threadprivate:
6663 case OMPD_declare_reduction:
6664 case OMPD_declare_mapper:
6665 case OMPD_declare_simd:
6667 case OMPD_declare_variant:
6668 case OMPD_begin_declare_variant:
6669 case OMPD_end_declare_variant:
6670 llvm_unreachable(
"OpenMP Directive is not allowed");
6673 llvm_unreachable(
"Unknown OpenMP directive");
6676 ErrorFound = Res.
isInvalid() || ErrorFound;
6680 if (
DSAStack->getDefaultDSA() == DSA_none ||
6681 DSAStack->getDefaultDSA() == DSA_private ||
6682 DSAStack->getDefaultDSA() == DSA_firstprivate) {
6683 DSAAttrChecker DSAChecker(
DSAStack, *
this,
nullptr);
6685 switch (C->getClauseKind()) {
6686 case OMPC_num_threads:
6687 case OMPC_dist_schedule:
6694 cast<OMPIfClause>(C)->getNameModifier() != OMPD_target)
6698 cast<OMPIfClause>(C)->getNameModifier() != OMPD_parallel)
6704 case OMPC_grainsize:
6705 case OMPC_num_tasks:
6708 case OMPC_novariants:
6709 case OMPC_nocontext:
6716 case OMPC_num_teams:
6717 case OMPC_thread_limit:
6724 case OMPC_proc_bind:
6726 case OMPC_firstprivate:
6727 case OMPC_lastprivate:
6729 case OMPC_reduction:
6730 case OMPC_task_reduction:
6731 case OMPC_in_reduction:
6735 case OMPC_copyprivate:
6738 case OMPC_mergeable:
6755 case OMPC_defaultmap:
6758 case OMPC_use_device_ptr:
6759 case OMPC_use_device_addr:
6760 case OMPC_is_device_ptr:
6761 case OMPC_has_device_addr:
6762 case OMPC_nontemporal:
6765 case OMPC_inclusive:
6766 case OMPC_exclusive:
6767 case OMPC_uses_allocators:
6772 case OMPC_allocator:
6775 case OMPC_threadprivate:
6778 case OMPC_unified_address:
6779 case OMPC_unified_shared_memory:
6780 case OMPC_reverse_offload:
6781 case OMPC_dynamic_allocators:
6782 case OMPC_atomic_default_mem_order:
6783 case OMPC_device_type:
6790 llvm_unreachable(
"Unexpected clause");
6792 for (
Stmt *CC : C->children()) {
6794 DSAChecker.Visit(CC);
6797 for (
const auto &
P : DSAChecker.getVarsWithInheritedDSA())
6798 VarsWithInheritedDSA[
P.getFirst()] =
P.getSecond();
6800 for (
const auto &
P : VarsWithInheritedDSA) {
6801 if (
P.getFirst()->isImplicit() || isa<OMPCapturedExprDecl>(
P.getFirst()))
6804 if (
DSAStack->getDefaultDSA() == DSA_none ||
6805 DSAStack->getDefaultDSA() == DSA_private ||
6806 DSAStack->getDefaultDSA() == DSA_firstprivate) {
6807 Diag(
P.second->getExprLoc(), diag::err_omp_no_dsa_for_variable)
6808 <<
P.first <<
P.second->getSourceRange();
6809 Diag(
DSAStack->getDefaultDSALocation(), diag::note_omp_default_dsa_none);
6811 Diag(
P.second->getExprLoc(),
6812 diag::err_omp_defaultmap_no_attr_for_variable)
6813 <<
P.first <<
P.second->getSourceRange();
6815 diag::note_omp_defaultmap_attr_none);
6819 if (!AllowedNameModifiers.empty())
6833 DSAStack->addTargetDirLocation(StartLoc);
6844 assert(Aligneds.size() == Alignments.size());
6845 assert(Linears.size() == LinModifiers.size());
6846 assert(Linears.size() == Steps.size());
6847 if (!DG || DG.
get().isNull())
6850 const int SimdId = 0;
6851 if (!DG.
get().isSingleDecl()) {
6852 Diag(SR.
getBegin(), diag::err_omp_single_decl_in_declare_simd_variant)
6856 Decl *ADecl = DG.
get().getSingleDecl();
6857 if (
auto *FTD = dyn_cast<FunctionTemplateDecl>(ADecl))
6858 ADecl = FTD->getTemplatedDecl();
6860 auto *FD = dyn_cast<FunctionDecl>(ADecl);
6862 Diag(ADecl->
getLocation(), diag::err_omp_function_expected) << SimdId;
6871 SL = VerifyPositiveIntegerConstantInClause(Simdlen, OMPC_simdlen);
6878 llvm::DenseMap<const Decl *, const Expr *> UniformedArgs;
6879 const Expr *UniformedLinearThis =
nullptr;
6880 for (
const Expr *E : Uniforms) {
6882 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E))
6883 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl()))
6884 if (FD->getNumParams() > PVD->getFunctionScopeIndex() &&
6885 FD->getParamDecl(PVD->getFunctionScopeIndex())
6887 UniformedArgs.try_emplace(PVD->getCanonicalDecl(), E);
6890 if (isa<CXXThisExpr>(E)) {
6891 UniformedLinearThis = E;
6895 << FD->
getDeclName() << (isa<CXXMethodDecl>(ADecl) ? 1 : 0);
6905 llvm::DenseMap<const Decl *, const Expr *> AlignedArgs;
6906 const Expr *AlignedThis =
nullptr;
6907 for (
const Expr *E : Aligneds) {
6909 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E))
6910 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
6912 if (FD->getNumParams() > PVD->getFunctionScopeIndex() &&
6913 FD->getParamDecl(PVD->getFunctionScopeIndex())
6917 if (AlignedArgs.count(CanonPVD) > 0) {
6919 << 1 << getOpenMPClauseName(OMPC_aligned)
6921 Diag(AlignedArgs[CanonPVD]->getExprLoc(),
6922 diag::note_omp_explicit_dsa)
6923 << getOpenMPClauseName(OMPC_aligned);
6926 AlignedArgs[CanonPVD] = E;
6928 .getNonReferenceType()
6929 .getUnqualifiedType()
6930 .getCanonicalType();
6933 Diag(E->
getExprLoc(), diag::err_omp_aligned_expected_array_or_ptr)
6935 Diag(PVD->getLocation(), diag::note_previous_decl) << PVD;
6940 if (isa<CXXThisExpr>(E)) {
6943 << 2 << getOpenMPClauseName(OMPC_aligned) << E->
getSourceRange();
6945 << getOpenMPClauseName(OMPC_aligned);
6951 << FD->
getDeclName() << (isa<CXXMethodDecl>(ADecl) ? 1 : 0);
6958 for (
Expr *E : Alignments) {
6961 Align = VerifyPositiveIntegerConstantInClause(E, OMPC_aligned);
6962 NewAligns.push_back(Align.
get());
6973 llvm::DenseMap<const Decl *, const Expr *> LinearArgs;
6974 const bool IsUniformedThis = UniformedLinearThis !=
nullptr;
6975 auto MI = LinModifiers.begin();
6976 for (
const Expr *E : Linears) {
6980 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E))
6981 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
6983 if (FD->getNumParams() > PVD->getFunctionScopeIndex() &&
6984 FD->getParamDecl(PVD->getFunctionScopeIndex())
6988 if (LinearArgs.count(CanonPVD) > 0) {
6990 << getOpenMPClauseName(OMPC_linear)
6992 Diag(LinearArgs[CanonPVD]->getExprLoc(),
6993 diag::note_omp_explicit_dsa)
6994 << getOpenMPClauseName(OMPC_linear);
6998 if (UniformedArgs.count(CanonPVD) > 0) {
7000 << getOpenMPClauseName(OMPC_linear)
7002 Diag(UniformedArgs[CanonPVD]->getExprLoc(),
7003 diag::note_omp_explicit_dsa)
7004 << getOpenMPClauseName(OMPC_uniform);
7007 LinearArgs[CanonPVD] = E;
7013 PVD->getOriginalType(),
7018 if (isa<CXXThisExpr>(E)) {
7019 if (UniformedLinearThis) {
7021 << getOpenMPClauseName(OMPC_linear)
7022 << getOpenMPClauseName(IsUniformedThis ? OMPC_uniform : OMPC_linear)
7024 Diag(UniformedLinearThis->
getExprLoc(), diag::note_omp_explicit_dsa)
7025 << getOpenMPClauseName(IsUniformedThis ? OMPC_uniform
7029 UniformedLinearThis = E;
7038 << FD->
getDeclName() << (isa<CXXMethodDecl>(ADecl) ? 1 : 0);
7040 Expr *Step =
nullptr;
7041 Expr *NewStep =
nullptr;
7043 for (
Expr *E : Steps) {
7045 if (Step == E || !E) {
7046 NewSteps.push_back(E ? NewStep :
nullptr);
7050 if (
const auto *DRE = dyn_cast<DeclRefExpr>(Step))
7051 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
7053 if (UniformedArgs.count(CanonPVD) == 0) {
7060 NewSteps.push_back(Step);
7077 NewSteps.push_back(NewStep);
7079 auto *NewAttr = OMPDeclareSimdDeclAttr::CreateImplicit(
7081 Uniforms.size(),
const_cast<Expr **
>(Aligneds.data()), Aligneds.size(),
7082 const_cast<Expr **
>(NewAligns.data()), NewAligns.size(),
7083 const_cast<Expr **
>(Linears.data()), Linears.size(),
7084 const_cast<unsigned *
>(LinModifiers.data()), LinModifiers.size(),
7085 NewSteps.data(), NewSteps.size(), SR);
7093 "Expected function type with prototype.");
7095 "Expected function with type with no prototype.");
7097 "Expected function with prototype.");
7105 Param->setScopeInfo(0, Params.size());
7106 Param->setImplicit();
7107 Params.push_back(Param);
7110 FD->setParams(Params);
7117 if (
auto *UTemplDecl = dyn_cast<FunctionTemplateDecl>(D))
7118 FD = UTemplDecl->getTemplatedDecl();
7120 FD = cast<FunctionDecl>(D);
7121 assert(FD &&
"Expected a function declaration!");
7127 for (AssumptionAttr *AA : OMPAssumeScoped)
7130 for (AssumptionAttr *AA : OMPAssumeGlobal)
// Records the OpenMP context trait info for a declare-variant scope and
// caches the trait set's mangled-name suffix, which is appended later when
// constructing the names of functions declared inside the scope.
Sema::OMPDeclareVariantScope::OMPDeclareVariantScope(OMPTraitInfo &TI)
    : TI(&TI), NameSuffix(TI.getMangledName()) {}
7143 OMPDeclareVariantScope &DVScope = OMPDeclareVariantScopes.back();
7146 bool IsTemplated = !TemplateParamLists.empty();
7148 !DVScope.TI->isExtensionActive(
7149 llvm::omp::TraitProperty::implementation_extension_allow_templates))
7165 for (
auto *Candidate : Lookup) {
7166 auto *CandidateDecl = Candidate->getUnderlyingDecl();
7168 if (IsTemplated && isa<FunctionTemplateDecl>(CandidateDecl)) {
7169 auto *FTD = cast<FunctionTemplateDecl>(CandidateDecl);
7170 if (FTD->getTemplateParameters()->size() == TemplateParamLists.size())
7171 UDecl = FTD->getTemplatedDecl();
7172 }
else if (!IsTemplated)
7173 UDecl = dyn_cast<FunctionDecl>(CandidateDecl);
7187 FType, UDeclTy,
false,
7194 Bases.push_back(UDecl);
7197 bool UseImplicitBase = !DVScope.TI->isExtensionActive(
7198 llvm::omp::TraitProperty::implementation_extension_disable_implicit_base);
7200 if (Bases.empty() && UseImplicitBase) {
7204 if (
auto *BaseTemplD = dyn_cast<FunctionTemplateDecl>(BaseD))
7205 Bases.push_back(BaseTemplD->getTemplatedDecl());
7207 Bases.push_back(cast<FunctionDecl>(BaseD));
7213 MangledName += DVScope.NameSuffix;
7228 if (
auto *UTemplDecl = dyn_cast<FunctionTemplateDecl>(D))
7229 FD = UTemplDecl->getTemplatedDecl();
7231 FD = cast<FunctionDecl>(D);
7238 OMPDeclareVariantScope &DVScope = OMPDeclareVariantScopes.back();
7239 auto *OMPDeclareVariantA = OMPDeclareVariantAttr::CreateImplicit(
7240 Context, VariantFuncRef, DVScope.TI,
7245 BaseFD->addAttr(OMPDeclareVariantA);
7254 CallExpr *CE = dyn_cast<CallExpr>(Call.get());
7263 CalleeFnDecl->
getName().startswith_insensitive(
"omp_")) {
7266 Diag(LParenLoc, diag::err_omp_unexpected_call_to_omp_runtime_api);
7269 if (!CalleeFnDecl->
hasAttr<OMPDeclareVariantAttr>())
7273 std::function<void(StringRef)> DiagUnknownTrait = [
this,
7274 CE](StringRef ISATrait) {
7287 while (CalleeFnDecl) {
7288 for (OMPDeclareVariantAttr *A :
7290 Expr *VariantRef = A->getVariantFuncRef();
7292 VariantMatchInfo VMI;
7295 if (!isVariantApplicableInContext(VMI, OMPCtx,
7299 VMIs.push_back(VMI);
7300 Exprs.push_back(VariantRef);
7308 int BestIdx = getBestVariantMatchForContext(VMIs, OMPCtx);
7311 Expr *BestExpr = cast<DeclRefExpr>(Exprs[BestIdx]);
7312 Decl *BestDecl = cast<DeclRefExpr>(BestExpr)->getDecl();
7328 if (
auto *SpecializedMethod = dyn_cast<CXXMethodDecl>(BestDecl)) {
7329 auto *MemberCall = dyn_cast<CXXMemberCallExpr>(CE);
7331 Context, MemberCall->getImplicitObjectArgument(),
7333 MemberCall->getValueKind(), MemberCall->getObjectKind());
7338 if (
CallExpr *NCE = dyn_cast<CallExpr>(NewCall.
get())) {
7339 FunctionDecl *NewCalleeFnDecl = NCE->getDirectCallee();
7341 CalleeFnType, NewCalleeFnDecl->
getType(),
7352 VMIs.erase(VMIs.begin() + BestIdx);
7353 Exprs.erase(Exprs.begin() + BestIdx);
7354 }
while (!VMIs.empty());
7361 std::optional<std::pair<FunctionDecl *, Expr *>>
7364 unsigned NumAppendArgs,
7366 if (!DG || DG.
get().isNull())
7367 return std::nullopt;
7369 const int VariantId = 1;
7371 if (!DG.
get().isSingleDecl()) {
7372 Diag(SR.
getBegin(), diag::err_omp_single_decl_in_declare_simd_variant)
7374 return std::nullopt;
7376 Decl *ADecl = DG.
get().getSingleDecl();
7377 if (
auto *FTD = dyn_cast<FunctionTemplateDecl>(ADecl))
7378 ADecl = FTD->getTemplatedDecl();
7381 auto *FD = dyn_cast<FunctionDecl>(ADecl);
7385 return std::nullopt;
7388 auto &&HasMultiVersionAttributes = [](
const FunctionDecl *FD) {
7391 return FD->isMultiVersion() || FD->
hasAttr<TargetAttr>();
7394 if (HasMultiVersionAttributes(FD)) {
7395 Diag(FD->
getLocation(), diag::err_omp_declare_variant_incompat_attributes)
7397 return std::nullopt;
7402 Diag(SR.
getBegin(), diag::warn_omp_declare_variant_after_used)
7407 if (!FD->isThisDeclarationADefinition() && FD->isDefined(Definition) &&
7409 Diag(SR.
getBegin(), diag::warn_omp_declare_variant_after_emitted)
7414 Diag(SR.
getBegin(), diag::err_omp_function_expected) << VariantId;
7415 return std::nullopt;
7418 auto ShouldDelayChecks = [](
Expr *&E,
bool) {
7424 if (FD->isDependentContext() || ShouldDelayChecks(VariantRef,
false) ||
7426 return std::make_pair(FD, VariantRef);
7429 auto HandleNonConstantScoresAndConditions = [
this](
Expr *&E,
7430 bool IsScore) ->
bool {
7436 Diag(E->
getExprLoc(), diag::warn_omp_declare_variant_score_not_constant)
7444 diag::err_omp_declare_variant_user_condition_not_constant)
7450 return std::nullopt;
7453 if (NumAppendArgs) {
7456 Diag(FD->
getLocation(), diag::err_omp_declare_variant_prototype_required)
7458 return std::nullopt;
7467 TD = dyn_cast_or_null<TypeDecl>(ND);
7470 Diag(SR.
getBegin(), diag::err_omp_interop_type_not_found) << SR;
7471 return std::nullopt;
7474 if (PTy->isVariadic()) {
7475 Diag(FD->
getLocation(), diag::err_omp_append_args_with_varargs) << SR;
7476 return std::nullopt;
7479 Params.append(PTy->param_type_begin(), PTy->param_type_end());
7480 Params.insert(Params.end(), NumAppendArgs, InteropType);
7482 PTy->getExtProtoInfo());
7490 auto *Method = dyn_cast<CXXMethodDecl>(FD);
7491 if (Method && !Method->isStatic()) {
7492 const Type *ClassType =
7506 return std::nullopt;
7508 VariantRef = ER.
get();
7522 diag::err_omp_declare_variant_incompat_types)
7524 << ((Method && !Method->isStatic()) ? FnPtrType : FD->
getType())
7526 return std::nullopt;
7531 return std::nullopt;
7534 if (Method && !Method->isStatic()) {
7535 Expr *PossibleAddrOfVariantRef = VariantRefCast.
get();
7536 if (
auto *UO = dyn_cast<UnaryOperator>(
7538 VariantRefCast = UO->getSubExpr();
7547 return std::nullopt;
7555 return std::nullopt;
7557 auto *NewFD = dyn_cast_or_null<FunctionDecl>(DRE->getDecl());
7561 return std::nullopt;
7566 diag::err_omp_declare_variant_same_base_function)
7568 return std::nullopt;
7577 diag::err_omp_declare_variant_incompat_types)
7578 << NewFD->getType() << FD->
getType() << (NumAppendArgs ? 1 : 0)
7580 return std::nullopt;
7585 else if (NewFD->getType()->isFunctionNoProtoType())
7591 if (NewFD->hasAttrs() && NewFD->hasAttr<OMPDeclareVariantAttr>()) {
7593 diag::warn_omp_declare_variant_marked_as_declare_variant)
7596 NewFD->specific_attr_begin<OMPDeclareVariantAttr>()->
getRange();
7597 Diag(SR.
getBegin(), diag::note_omp_marked_declare_variant_here) << SR;
7598 return std::nullopt;
7601 enum DoesntSupport {
7610 if (
const auto *CXXFD = dyn_cast<CXXMethodDecl>(FD)) {
7611 if (CXXFD->isVirtual()) {
7614 return std::nullopt;
7617 if (isa<CXXConstructorDecl>(FD)) {
7620 return std::nullopt;
7623 if (isa<CXXDestructorDecl>(FD)) {
7626 return std::nullopt;
7630 if (FD->isDeleted()) {
7633 return std::nullopt;
7636 if (FD->isDefaulted()) {
7639 return std::nullopt;
7642 if (FD->isConstexpr()) {
7644 << (NewFD->isConsteval() ? ConstevalFuncs : ConstexprFuncs);
7645 return std::nullopt;
7655 PDiag(diag::err_omp_declare_variant_doesnt_support)),
7657 PDiag(diag::err_omp_declare_variant_diff)
7661 return std::nullopt;
7662 return std::make_pair(FD, cast<Expr>(DRE));
7678 llvm::append_range(AllAdjustArgs, AdjustArgsNothing);
7679 llvm::append_range(AllAdjustArgs, AdjustArgsNeedDevicePtr);
7681 if (!AllAdjustArgs.empty() || !AppendArgs.empty()) {
7682 VariantMatchInfo VMI;
7684 if (!llvm::is_contained(
7685 VMI.ConstructTraits,
7686 llvm::omp::TraitProperty::construct_dispatch_dispatch)) {
7687 if (!AllAdjustArgs.empty())
7688 Diag(AdjustArgsLoc, diag::err_omp_clause_requires_dispatch_construct)
7689 << getOpenMPClauseName(OMPC_adjust_args);
7690 if (!AppendArgs.empty())
7691 Diag(AppendArgsLoc, diag::err_omp_clause_requires_dispatch_construct)
7692 << getOpenMPClauseName(OMPC_append_args);
7702 for (
Expr *E : AllAdjustArgs) {
7704 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E)) {
7705 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
7707 if (FD->
getNumParams() > PVD->getFunctionScopeIndex() &&
7711 if (!AdjustVars.insert(CanonPVD).second) {
7712 Diag(DRE->getLocation(), diag::err_omp_adjust_arg_multiple_clauses)
7721 Diag(E->
getExprLoc(), diag::err_omp_param_or_this_in_clause) << FD << 0;
7725 auto *NewAttr = OMPDeclareVariantAttr::CreateImplicit(
7726 Context, VariantRef, &TI,
const_cast<Expr **
>(AdjustArgsNothing.data()),
7727 AdjustArgsNothing.size(),
7728 const_cast<Expr **
>(AdjustArgsNeedDevicePtr.data()),
7729 AdjustArgsNeedDevicePtr.size(),
7730 const_cast<OMPInteropInfo *
>(AppendArgs.data()), AppendArgs.size(), SR);
7741 auto *CS = cast<CapturedStmt>(AStmt);
7752 DSAStack->getTaskgroupReductionRef(),
7758 struct LoopIterationSpace final {
7761 bool IsStrictCompare =
false;
7763 Expr *PreCond =
nullptr;
7766 Expr *NumIterations =
nullptr;
7768 Expr *CounterVar =
nullptr;
7770 Expr *PrivateCounterVar =
nullptr;
7772 Expr *CounterInit =
nullptr;
7775 Expr *CounterStep =
nullptr;
7777 bool Subtract =
false;
7787 Expr *MinValue =
nullptr;
7791 Expr *MaxValue =
nullptr;
7793 bool IsNonRectangularLB =
false;
7795 bool IsNonRectangularUB =
false;
7798 unsigned LoopDependentIdx = 0;
7802 Expr *FinalCondition =
nullptr;
7808 class OpenMPIterationSpaceChecker {
7812 bool SupportsNonRectangular;
7828 Expr *LCRef =
nullptr;
7834 Expr *Step =
nullptr;
7841 std::optional<bool> TestIsLessOp;
7843 bool TestIsStrictOp =
false;
7845 bool SubtractStep =
false;
7850 std::optional<unsigned> InitDependOnLC;
7853 std::optional<unsigned> CondDependOnLC;
7855 std::optional<unsigned> doesDependOnLoopCounter(
const Stmt *S,
7856 bool IsInitializer);
7862 OpenMPIterationSpaceChecker(
Sema &SemaRef,
bool SupportsNonRectangular,
7864 : SemaRef(SemaRef), SupportsNonRectangular(SupportsNonRectangular),
7865 Stack(Stack), DefaultLoc(DefaultLoc), ConditionLoc(DefaultLoc) {}
7868 bool checkAndSetInit(
Stmt *S,
bool EmitDiags =
true);
7871 bool checkAndSetCond(
Expr *S);
7874 bool checkAndSetInc(
Expr *S);
7876 ValueDecl *getLoopDecl()
const {
return LCDecl; }
7878 Expr *getLoopDeclRefExpr()
const {
return LCRef; }
7880 SourceRange getInitSrcRange()
const {
return InitSrcRange; }
7882 SourceRange getConditionSrcRange()
const {
return ConditionSrcRange; }
7884 SourceRange getIncrementSrcRange()
const {
return IncrementSrcRange; }
7886 bool shouldSubtractStep()
const {
return SubtractStep; }
7888 bool isStrictTestOp()
const {
return TestIsStrictOp; }
7890 Expr *buildNumIterations(
7892 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const;
7896 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const;
7899 buildCounterVar(llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
7900 DSAStackTy &DSA)
const;
7903 Expr *buildPrivateCounterVar()
const;
7907 Expr *buildCounterStep()
const;
7911 buildOrderedLoopData(
Scope *S,
Expr *Counter,
7912 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
7916 std::pair<Expr *, Expr *> buildMinMaxValues(
7917 Scope *S, llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const;
7919 Expr *buildFinalCondition(
Scope *S)
const;
7921 bool dependent()
const;
7923 bool doesInitDependOnLC()
const {
return InitDependOnLC.has_value(); }
7925 bool doesCondDependOnLC()
const {
return CondDependOnLC.has_value(); }
7927 unsigned getLoopDependentIdx()
const {
7928 return InitDependOnLC.value_or(CondDependOnLC.value_or(0));
7934 bool checkAndSetIncRHS(
Expr *RHS);
7939 bool setUB(
Expr *NewUB, std::optional<bool> LessOp,
bool StrictOp,
7942 bool setStep(
Expr *NewStep,
bool Subtract);
7945 bool OpenMPIterationSpaceChecker::dependent()
const {
7947 assert(!LB && !UB && !Step);
7955 bool OpenMPIterationSpaceChecker::setLCDeclAndLB(
ValueDecl *NewLCDecl,
7957 Expr *NewLB,
bool EmitDiags) {
7959 assert(LCDecl ==
nullptr && LB ==
nullptr && LCRef ==
nullptr &&
7960 UB ==
nullptr && Step ==
nullptr && !TestIsLessOp && !TestIsStrictOp);
7964 LCRef = NewLCRefExpr;
7965 if (
auto *CE = dyn_cast_or_null<CXXConstructExpr>(NewLB))
7967 if ((Ctor->isCopyOrMoveConstructor() ||
7968 Ctor->isConvertingConstructor(
false)) &&
7969 CE->getNumArgs() > 0 && CE->getArg(0) !=
nullptr)
7973 InitDependOnLC = doesDependOnLoopCounter(LB,
true);
7977 bool OpenMPIterationSpaceChecker::setUB(
Expr *NewUB, std::optional<bool> LessOp,
7981 assert(LCDecl !=
nullptr && LB !=
nullptr && UB ==
nullptr &&
7982 Step ==
nullptr && !TestIsLessOp && !TestIsStrictOp);
7987 TestIsLessOp = LessOp;
7988 TestIsStrictOp = StrictOp;
7989 ConditionSrcRange = SR;
7991 CondDependOnLC = doesDependOnLoopCounter(UB,
false);
7995 bool OpenMPIterationSpaceChecker::setStep(
Expr *NewStep,
bool Subtract) {
7997 assert(LCDecl !=
nullptr && LB !=
nullptr && Step ==
nullptr);
8007 NewStep = Val.
get();
8020 std::optional<llvm::APSInt> Result =
8024 Result && Result->isSigned() && (Subtract != Result->isNegative());
8026 Result && Result->isSigned() && (Subtract == Result->isNegative());
8027 bool IsConstZero = Result && !Result->getBoolValue();
8031 TestIsLessOp = IsConstPos || (IsUnsigned && !Subtract);
8032 if (UB && (IsConstZero ||
8033 (*TestIsLessOp ? (IsConstNeg || (IsUnsigned && Subtract))
8034 : (IsConstPos || (IsUnsigned && !Subtract))))) {
8036 diag::err_omp_loop_incr_not_compatible)
8038 SemaRef.
Diag(ConditionLoc,
8039 diag::note_omp_loop_cond_requres_compatible_incr)
8040 << *TestIsLessOp << ConditionSrcRange;
8043 if (*TestIsLessOp == Subtract) {
8047 Subtract = !Subtract;
8052 SubtractStep = Subtract;
8059 class LoopCounterRefChecker final
8066 bool IsInitializer =
true;
8067 bool SupportsNonRectangular;
8068 unsigned BaseLoopId = 0;
8071 SemaRef.Diag(E->
getExprLoc(), diag::err_omp_stmt_depends_on_loop_counter)
8072 << (IsInitializer ? 0 : 1);
8075 const auto &&
Data = Stack.isLoopControlVariable(VD);
8081 llvm::raw_svector_ostream
OS(Name);
8085 diag::err_omp_wrong_dependency_iterator_type)
8087 SemaRef.Diag(VD->
getLocation(), diag::note_previous_decl) << VD;
8090 if (
Data.first && !SupportsNonRectangular) {
8091 SemaRef.Diag(E->
getExprLoc(), diag::err_omp_invariant_dependency);
8095 (DepDecl || (PrevDepDecl &&
8097 if (!DepDecl && PrevDepDecl)
8098 DepDecl = PrevDepDecl;
8100 llvm::raw_svector_ostream
OS(Name);
8104 diag::err_omp_invariant_or_linear_dependency)
8110 BaseLoopId =
Data.first;
8118 if (isa<VarDecl>(VD))
8119 return checkDecl(E, VD);
8125 if (isa<VarDecl>(VD) || isa<FieldDecl>(VD))
8126 return checkDecl(E, VD);
8130 bool VisitStmt(
const Stmt *S) {
8132 for (
const Stmt *Child : S->children())
8133 Res = (Child && Visit(Child)) || Res;
8136 explicit LoopCounterRefChecker(
Sema &SemaRef, DSAStackTy &Stack,
8137 const ValueDecl *CurLCDecl,
bool IsInitializer,
8139 bool SupportsNonRectangular =
true)
8140 : SemaRef(SemaRef), Stack(Stack), CurLCDecl(CurLCDecl),
8141 PrevDepDecl(PrevDepDecl), IsInitializer(IsInitializer),
8142 SupportsNonRectangular(SupportsNonRectangular) {}
8143 unsigned getBaseLoopId()
const {
8144 assert(CurLCDecl &&
"Expected loop dependency.");
8148 assert(CurLCDecl &&
"Expected loop dependency.");
8154 std::optional<unsigned>
8155 OpenMPIterationSpaceChecker::doesDependOnLoopCounter(
const Stmt *S,
8156 bool IsInitializer) {
8158 LoopCounterRefChecker LoopStmtChecker(SemaRef, Stack, LCDecl, IsInitializer,
8159 DepDecl, SupportsNonRectangular);
8160 if (LoopStmtChecker.Visit(S)) {
8161 DepDecl = LoopStmtChecker.getDepDecl();
8162 return LoopStmtChecker.getBaseLoopId();
8164 return std::nullopt;
8167 bool OpenMPIterationSpaceChecker::checkAndSetInit(
Stmt *S,
bool EmitDiags) {
8178 SemaRef.Diag(DefaultLoc, diag::err_omp_loop_not_canonical_init);
8182 if (
auto *ExprTemp = dyn_cast<ExprWithCleanups>(S))
8183 if (!ExprTemp->cleanupsHaveSideEffects())
8184 S = ExprTemp->getSubExpr();
8186 InitSrcRange = S->getSourceRange();
8187 if (
Expr *E = dyn_cast<Expr>(S))
8189 if (
auto *BO = dyn_cast<BinaryOperator>(S)) {
8190 if (BO->getOpcode() == BO_Assign) {
8192 if (
auto *DRE = dyn_cast<DeclRefExpr>(LHS)) {
8193 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(DRE->getDecl()))
8195 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
8197 return setLCDeclAndLB(DRE->getDecl(), DRE, BO->getRHS(), EmitDiags);
8199 if (
auto *ME = dyn_cast<MemberExpr>(LHS)) {
8200 if (ME->isArrow() &&
8201 isa<CXXThisExpr>(ME->getBase()->IgnoreParenImpCasts()))
8202 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
8206 }
else if (
auto *DS = dyn_cast<DeclStmt>(S)) {
8207 if (DS->isSingleDecl()) {
8208 if (
auto *Var = dyn_cast_or_null<VarDecl>(DS->getSingleDecl())) {
8209 if (Var->hasInit() && !Var->getType()->isReferenceType()) {
8212 SemaRef.Diag(S->getBeginLoc(),
8213 diag::ext_omp_loop_not_canonical_init)
8214 << S->getSourceRange();
8215 return setLCDeclAndLB(
8218 Var->getType().getNonReferenceType(),
8220 Var->getInit(), EmitDiags);
8224 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(S)) {
8225 if (CE->getOperator() == OO_Equal) {
8226 Expr *LHS = CE->getArg(0);
8227 if (
auto *DRE = dyn_cast<DeclRefExpr>(LHS)) {
8228 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(DRE->getDecl()))
8230 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
8232 return setLCDeclAndLB(DRE->getDecl(), DRE, CE->getArg(1), EmitDiags);
8234 if (
auto *ME = dyn_cast<MemberExpr>(LHS)) {
8235 if (ME->isArrow() &&
8236 isa<CXXThisExpr>(ME->getBase()->IgnoreParenImpCasts()))
8237 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
8243 if (dependent() || SemaRef.CurContext->isDependentContext())
8246 SemaRef.Diag(S->getBeginLoc(), diag::err_omp_loop_not_canonical_init)
8247 << S->getSourceRange();
8258 if (
const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E))
8260 if ((Ctor->isCopyOrMoveConstructor() ||
8261 Ctor->isConvertingConstructor(
false)) &&
8262 CE->getNumArgs() > 0 && CE->getArg(0) !=
nullptr)
8264 if (
const auto *DRE = dyn_cast_or_null<DeclRefExpr>(E)) {
8265 if (
const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()))
8268 if (
const auto *ME = dyn_cast_or_null<MemberExpr>(E))
8269 if (ME->isArrow() && isa<CXXThisExpr>(ME->getBase()->IgnoreParenImpCasts()))
8274 bool OpenMPIterationSpaceChecker::checkAndSetCond(
Expr *S) {
8281 bool IneqCondIsCanonical = SemaRef.getLangOpts().OpenMP >= 50;
8283 SemaRef.Diag(DefaultLoc, diag::err_omp_loop_not_canonical_cond)
8284 << (IneqCondIsCanonical ? 1 : 0) << LCDecl;
8290 auto &&CheckAndSetCond =
8295 if (getInitLCDecl(LHS) == LCDecl)
8296 return setUB(
const_cast<Expr *
>(RHS),
8299 if (getInitLCDecl(RHS) == LCDecl)
8300 return setUB(
const_cast<Expr *
>(LHS),
8303 }
else if (IneqCondIsCanonical &&
Opcode == BO_NE) {
8304 return setUB(
const_cast<Expr *
>(getInitLCDecl(LHS) == LCDecl ? RHS : LHS),
8308 return std::nullopt;
8310 std::optional<bool> Res;
8311 if (
auto *RBO = dyn_cast<CXXRewrittenBinaryOperator>(S)) {
8314 RBO->getOperatorLoc());
8315 }
else if (
auto *BO = dyn_cast<BinaryOperator>(S)) {
8316 Res = CheckAndSetCond(BO->getOpcode(), BO->getLHS(), BO->getRHS(),
8317 BO->getSourceRange(), BO->getOperatorLoc());
8318 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(S)) {
8319 if (CE->getNumArgs() == 2) {
8320 Res = CheckAndSetCond(
8322 CE->getArg(1), CE->getSourceRange(), CE->getOperatorLoc());
8327 if (dependent() || SemaRef.CurContext->isDependentContext())
8329 SemaRef.Diag(CondLoc, diag::err_omp_loop_not_canonical_cond)
8330 << (IneqCondIsCanonical ? 1 : 0) << S->getSourceRange() << LCDecl;
8334 bool OpenMPIterationSpaceChecker::checkAndSetIncRHS(
Expr *RHS) {
8341 if (
auto *BO = dyn_cast<BinaryOperator>(RHS)) {
8342 if (BO->isAdditiveOp()) {
8343 bool IsAdd = BO->getOpcode() == BO_Add;
8344 if (getInitLCDecl(BO->getLHS()) == LCDecl)
8345 return setStep(BO->getRHS(), !IsAdd);
8346 if (IsAdd && getInitLCDecl(BO->getRHS()) == LCDecl)
8347 return setStep(BO->getLHS(),
false);
8349 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(RHS)) {
8350 bool IsAdd = CE->getOperator() == OO_Plus;
8351 if ((IsAdd || CE->getOperator() == OO_Minus) && CE->getNumArgs() == 2) {
8352 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8353 return setStep(CE->getArg(1), !IsAdd);
8354 if (IsAdd && getInitLCDecl(CE->getArg(1)) == LCDecl)
8355 return setStep(CE->getArg(0),
false);
8358 if (dependent() || SemaRef.CurContext->isDependentContext())
8360 SemaRef.Diag(RHS->
getBeginLoc(), diag::err_omp_loop_not_canonical_incr)
8365 bool OpenMPIterationSpaceChecker::checkAndSetInc(
Expr *S) {
8380 SemaRef.Diag(DefaultLoc, diag::err_omp_loop_not_canonical_incr) << LCDecl;
8383 if (
auto *ExprTemp = dyn_cast<ExprWithCleanups>(S))
8384 if (!ExprTemp->cleanupsHaveSideEffects())
8385 S = ExprTemp->getSubExpr();
8387 IncrementSrcRange = S->getSourceRange();
8388 S = S->IgnoreParens();
8389 if (
auto *UO = dyn_cast<UnaryOperator>(S)) {
8390 if (UO->isIncrementDecrementOp() &&
8391 getInitLCDecl(UO->getSubExpr()) == LCDecl)
8392 return setStep(SemaRef
8393 .ActOnIntegerConstant(UO->getBeginLoc(),
8394 (UO->isDecrementOp() ? -1 : 1))
8397 }
else if (
auto *BO = dyn_cast<BinaryOperator>(S)) {
8398 switch (BO->getOpcode()) {
8401 if (getInitLCDecl(BO->getLHS()) == LCDecl)
8402 return setStep(BO->getRHS(), BO->getOpcode() == BO_SubAssign);
8405 if (getInitLCDecl(BO->getLHS()) == LCDecl)
8406 return checkAndSetIncRHS(BO->getRHS());
8411 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(S)) {
8412 switch (CE->getOperator()) {
8415 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8416 return setStep(SemaRef
8417 .ActOnIntegerConstant(
8419 ((CE->getOperator() == OO_MinusMinus) ? -1 : 1))
8425 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8426 return setStep(CE->getArg(1), CE->getOperator() == OO_MinusEqual);
8429 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8430 return checkAndSetIncRHS(CE->getArg(1));
8436 if (dependent() || SemaRef.CurContext->isDependentContext())
8438 SemaRef.Diag(S->getBeginLoc(), diag::err_omp_loop_not_canonical_incr)
8439 << S->getSourceRange() << LCDecl;
8445 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
8446 StringRef Name =
".capture_expr.") {
8453 auto I = Captures.find(
Capture);
8454 if (I != Captures.end())
8467 bool TestIsStrictOp,
bool RoundToStep,
8468 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
8469 ExprResult NewStep = tryBuildCapture(SemaRef, Step, Captures,
".new_step");
8473 bool IsLowerConst =
false, IsStepConst =
false;
8474 if (std::optional<llvm::APSInt> Res =
8477 IsLowerConst =
true;
8479 if (std::optional<llvm::APSInt> Res =
8484 bool NoNeedToConvert = IsLowerConst && !RoundToStep &&
8485 ((!TestIsStrictOp && LRes.isNonNegative()) ||
8486 (TestIsStrictOp && LRes.isStrictlyPositive()));
8487 bool NeedToReorganize =
false;
8489 if (!NoNeedToConvert && IsLowerConst &&
8490 (TestIsStrictOp || (RoundToStep && IsStepConst))) {
8491 NoNeedToConvert =
true;
8493 unsigned BW = LRes.getBitWidth() > SRes.getBitWidth()
8494 ? LRes.getBitWidth()
8495 : SRes.getBitWidth();
8496 LRes = LRes.extend(BW + 1);
8497 LRes.setIsSigned(
true);
8498 SRes = SRes.extend(BW + 1);
8499 SRes.setIsSigned(
true);
8501 NoNeedToConvert = LRes.trunc(BW).extend(BW + 1) == LRes;
8502 LRes = LRes.trunc(BW);
8504 if (TestIsStrictOp) {
8505 unsigned BW = LRes.getBitWidth();
8506 LRes = LRes.extend(BW + 1);
8507 LRes.setIsSigned(
true);
8510 NoNeedToConvert && LRes.trunc(BW).extend(BW + 1) == LRes;
8512 LRes = LRes.trunc(BW);
8514 NeedToReorganize = NoNeedToConvert;
8517 bool IsUpperConst =
false;
8518 if (std::optional<llvm::APSInt> Res =
8521 IsUpperConst =
true;
8523 if (NoNeedToConvert && IsLowerConst && IsUpperConst &&
8524 (!RoundToStep || IsStepConst)) {
8525 unsigned BW = LRes.getBitWidth() > URes.getBitWidth() ? LRes.getBitWidth()
8526 : URes.getBitWidth();
8527 LRes = LRes.extend(BW + 1);
8528 LRes.setIsSigned(
true);
8529 URes = URes.extend(BW + 1);
8530 URes.setIsSigned(
true);
8532 NoNeedToConvert = URes.trunc(BW).extend(BW + 1) == URes;
8533 NeedToReorganize = NoNeedToConvert;
8538 if ((!NoNeedToConvert || (LRes.isNegative() && !IsUpperConst)) &&
8544 if ((LowerSize <= UpperSize && UpperTy->hasSignedIntegerRepresentation()) ||
8547 LowerSize > UpperSize ? LowerSize : UpperSize, 0);
8558 if (!Lower || !Upper || NewStep.
isInvalid())
8564 if (NeedToReorganize) {
8578 S, DefaultLoc, BO_Add, Diff.
get(),
8588 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Sub, Upper, Diff.
get());
8592 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Sub, Upper, Lower);
8608 S, DefaultLoc, BO_Sub, Diff.
get(),
8628 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Div, Diff.
get(), NewStep.
get());
8636 Expr *OpenMPIterationSpaceChecker::buildNumIterations(
8638 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const {
8647 if (InitDependOnLC) {
8648 const LoopIterationSpace &IS = ResultIterSpaces[*InitDependOnLC - 1];
8649 if (!IS.MinValue || !IS.MaxValue)
8658 IS.CounterVar, MinValue.
get());
8663 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, LBMinVal.
get(), LBVal);
8678 IS.CounterVar, MaxValue.
get());
8683 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, LBMaxVal.
get(), LBVal);
8692 tryBuildCapture(SemaRef, LBMinVal.
get(), Captures,
".lb_min").get();
8694 tryBuildCapture(SemaRef, LBMaxVal.
get(), Captures,
".lb_max").get();
8695 if (!LBMin || !LBMax)
8699 SemaRef.
BuildBinOp(S, DefaultLoc, BO_LT, LBMin, LBMax);
8703 tryBuildCapture(SemaRef, MinLessMaxRes.
get(), Captures,
".min_less_max")
8707 if (*TestIsLessOp) {
8711 MinLessMax, LBMin, LBMax);
8714 LBVal = MinLB.
get();
8719 MinLessMax, LBMax, LBMin);
8722 LBVal = MaxLB.
get();
8726 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Assign, IS.CounterVar, LBVal);
8729 LBVal = LBMinVal.
get();
8733 if (CondDependOnLC) {
8734 const LoopIterationSpace &IS = ResultIterSpaces[*CondDependOnLC - 1];
8735 if (!IS.MinValue || !IS.MaxValue)
8744 IS.CounterVar, MinValue.
get());
8749 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, UBMinVal.
get(), UBVal);
8764 IS.CounterVar, MaxValue.
get());
8769 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, UBMaxVal.
get(), UBVal);
8778 tryBuildCapture(SemaRef, UBMinVal.
get(), Captures,
".ub_min").get();
8780 tryBuildCapture(SemaRef, UBMaxVal.
get(), Captures,
".ub_max").get();
8781 if (!UBMin || !UBMax)
8785 SemaRef.
BuildBinOp(S, DefaultLoc, BO_GT, UBMin, UBMax);
8788 Expr *MinGreaterMax = tryBuildCapture(SemaRef, MinGreaterMaxRes.
get(),
8789 Captures,
".min_greater_max")
8793 if (*TestIsLessOp) {
8797 DefaultLoc, DefaultLoc, MinGreaterMax, UBMin, UBMax);
8800 UBVal = MaxUB.
get();
8805 DefaultLoc, DefaultLoc, MinGreaterMax, UBMax, UBMin);
8808 UBVal = MinUB.
get();
8811 Expr *UBExpr = *TestIsLessOp ? UBVal : LBVal;
8812 Expr *LBExpr = *TestIsLessOp ? LBVal : UBVal;
8813 Expr *Upper = tryBuildCapture(SemaRef, UBExpr, Captures,
".upper").get();
8814 Expr *Lower = tryBuildCapture(SemaRef, LBExpr, Captures,
".lower").get();
8815 if (!Upper || !Lower)
8818 ExprResult Diff = calculateNumIters(SemaRef, S, DefaultLoc, Lower, Upper,
8819 Step, VarType, TestIsStrictOp,
8828 C.getTypeSize(
Type) >
C.getTypeSize(VarType);
8831 UseVarType ?
C.getTypeSize(VarType) :
C.getTypeSize(
Type);
8834 Type =
C.getIntTypeForBitwidth(NewSize, IsSigned);
8843 unsigned NewSize = (
C.getTypeSize(
Type) > 32) ? 64 : 32;
8844 if (NewSize !=
C.getTypeSize(
Type)) {
8845 if (NewSize <
C.getTypeSize(
Type)) {
8846 assert(NewSize == 64 &&
"incorrect loop var size");
8847 SemaRef.
Diag(DefaultLoc, diag::warn_omp_loop_64_bit_var)
8848 << InitSrcRange << ConditionSrcRange;
8850 QualType NewType =
C.getIntTypeForBitwidth(
8852 C.getTypeSize(
Type) < NewSize);
8865 std::pair<Expr *, Expr *> OpenMPIterationSpaceChecker::buildMinMaxValues(
8866 Scope *S, llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const {
8870 return std::make_pair(
nullptr,
nullptr);
8873 Expr *MinExpr =
nullptr;
8874 Expr *MaxExpr =
nullptr;
8875 Expr *LBExpr = *TestIsLessOp ? LB : UB;
8876 Expr *UBExpr = *TestIsLessOp ? UB : LB;
8878 *TestIsLessOp ? InitDependOnLC.has_value() : CondDependOnLC.has_value();
8880 *TestIsLessOp ? CondDependOnLC.has_value() : InitDependOnLC.has_value();
8882 LBNonRect ? LBExpr : tryBuildCapture(SemaRef, LBExpr, Captures).get();
8884 UBNonRect ? UBExpr : tryBuildCapture(SemaRef, UBExpr, Captures).get();
8885 if (!Upper || !Lower)
8886 return std::make_pair(
nullptr,
nullptr);
8896 ExprResult Diff = calculateNumIters(SemaRef, S, DefaultLoc, Lower, Upper,
8897 Step, VarType, TestIsStrictOp,
8900 return std::make_pair(
nullptr,
nullptr);
8906 return std::make_pair(
nullptr,
nullptr);
8908 ExprResult NewStep = tryBuildCapture(SemaRef, Step, Captures,
".new_step");
8910 return std::make_pair(
nullptr,
nullptr);
8911 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Mul, Diff.
get(), NewStep.
get());
8913 return std::make_pair(
nullptr,
nullptr);
8918 return std::make_pair(
nullptr,
nullptr);
8923 Diff.
get()->getType(),
8930 return std::make_pair(
nullptr,
nullptr);
8932 if (*TestIsLessOp) {
8936 S, DefaultLoc, BO_Add,
8940 return std::make_pair(
nullptr,
nullptr);
8945 S, DefaultLoc, BO_Sub,
8949 return std::make_pair(
nullptr,
nullptr);
8958 return std::make_pair(
nullptr,
nullptr);
8963 return std::make_pair(
nullptr,
nullptr);
8966 MaxExpr = Diff.
get();
8968 MinExpr = Diff.
get();
8970 return std::make_pair(MinExpr, MaxExpr);
8973 Expr *OpenMPIterationSpaceChecker::buildFinalCondition(
Scope *S)
const {
8974 if (InitDependOnLC || CondDependOnLC)
8979 Expr *OpenMPIterationSpaceChecker::buildPreCond(
8981 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const {
8986 if (CondDependOnLC || InitDependOnLC)
8997 ExprResult NewLB = tryBuildCapture(SemaRef, LB, Captures);
8998 ExprResult NewUB = tryBuildCapture(SemaRef, UB, Captures);
9004 *TestIsLessOp ? (TestIsStrictOp ? BO_LT : BO_LE)
9005 : (TestIsStrictOp ? BO_GT : BO_GE),
9006 NewLB.
get(), NewUB.
get());
9016 return CondExpr.
isUsable() ? CondExpr.
get() : Cond;
9020 DeclRefExpr *OpenMPIterationSpaceChecker::buildCounterVar(
9021 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
9022 DSAStackTy &DSA)
const {
9023 auto *VD = dyn_cast<VarDecl>(LCDecl);
9028 const DSAStackTy::DSAVarData
Data =
9029 DSA.getTopDSA(LCDecl,
false);
9033 Captures.insert(std::make_pair(LCRef, Ref));
9036 return cast<DeclRefExpr>(LCRef);
9039 Expr *OpenMPIterationSpaceChecker::buildPrivateCounterVar()
const {
9045 isa<VarDecl>(LCDecl)
9059 Expr *OpenMPIterationSpaceChecker::buildCounterStep()
const {
return Step; }
9061 Expr *OpenMPIterationSpaceChecker::buildOrderedLoopData(
9063 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
SourceLocation Loc,
9069 assert((OOK == OO_Plus || OOK == OO_Minus) &&
9070 "Expected only + or - operations for depend clauses.");
9082 *TestIsLessOp ? Cnt : tryBuildCapture(SemaRef, LB, Captures).get();
9084 *TestIsLessOp ? tryBuildCapture(SemaRef, LB, Captures).get() : Cnt;
9085 if (!Upper || !Lower)
9089 SemaRef, S, DefaultLoc, Lower, Upper, Step, VarType,
9090 false,
false, Captures);
9099 assert(
getLangOpts().OpenMP &&
"OpenMP is not active.");
9100 assert(Init &&
"Expected loop in canonical form.");
9101 unsigned AssociatedLoops =
DSAStack->getAssociatedLoops();
9102 if (AssociatedLoops > 0 &&
9105 OpenMPIterationSpaceChecker ISC(*
this,
true,
9107 if (!ISC.checkAndSetInit(Init,
false)) {
9109 auto *VD = dyn_cast<VarDecl>(D);
9115 PrivateRef =
buildCapture(*
this, D, ISC.getLoopDeclRefExpr(),
9117 VD = cast<VarDecl>(PrivateRef->
getDecl());
9120 DSAStack->addLoopControlVariable(D, VD);
9123 DSAStack->resetPossibleLoopCounter();
9124 if (
auto *Var = dyn_cast_or_null<VarDecl>(LD))
9138 DSAStackTy::DSAVarData DVar =
9142 Expr *LoopDeclRefExpr = ISC.getLoopDeclRefExpr();
9145 ? (
DSAStack->hasMutipleLoops() ? OMPC_lastprivate : OMPC_linear)
9148 DVar.CKind != PredeterminedCKind && DVar.RefExpr &&
9149 (
LangOpts.OpenMP <= 45 || (DVar.CKind != OMPC_lastprivate &&
9150 DVar.CKind != OMPC_private))) ||
9152 DKind == OMPD_master_taskloop || DKind == OMPD_masked_taskloop ||
9153 DKind == OMPD_parallel_master_taskloop ||
9154 DKind == OMPD_parallel_masked_taskloop ||
9157 DVar.CKind != OMPC_private && DVar.CKind != OMPC_lastprivate)) &&
9158 (DVar.CKind != OMPC_private || DVar.RefExpr)) {
9159 Diag(Init->getBeginLoc(), diag::err_omp_loop_var_dsa)
9160 << getOpenMPClauseName(DVar.CKind)
9161 << getOpenMPDirectiveName(DKind)
9162 << getOpenMPClauseName(PredeterminedCKind);
9163 if (DVar.RefExpr ==
nullptr)
9164 DVar.CKind = PredeterminedCKind;
9167 }
else if (LoopDeclRefExpr) {
9172 if (DVar.CKind == OMPC_unknown)
9173 DSAStack->addDSA(D, LoopDeclRefExpr, PredeterminedCKind,
9178 DSAStack->setAssociatedLoops(AssociatedLoops - 1);
9186 unsigned CurrentNestedLoopCount,
unsigned NestedLoopCount,
9187 unsigned TotalNestedLoopCount,
Expr *CollapseLoopCountExpr,
9188 Expr *OrderedLoopCountExpr,
9191 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
9196 if (
auto *CanonLoop = dyn_cast_or_null<OMPCanonicalLoop>(S))
9197 S = CanonLoop->getLoopStmt();
9198 auto *For = dyn_cast_or_null<ForStmt>(S);
9199 auto *CXXFor = dyn_cast_or_null<CXXForRangeStmt>(S);
9201 if (!For && (SemaRef.
LangOpts.OpenMP <= 45 || !CXXFor)) {
9202 SemaRef.
Diag(S->getBeginLoc(), diag::err_omp_not_for)
9203 << (CollapseLoopCountExpr !=
nullptr || OrderedLoopCountExpr !=
nullptr)
9204 << getOpenMPDirectiveName(DKind) << TotalNestedLoopCount
9205 << (CurrentNestedLoopCount > 0) << CurrentNestedLoopCount;
9206 if (TotalNestedLoopCount > 1) {
9207 if (CollapseLoopCountExpr && OrderedLoopCountExpr)
9208 SemaRef.
Diag(DSA.getConstructLoc(),
9209 diag::note_omp_collapse_ordered_expr)
9212 else if (CollapseLoopCountExpr)
9214 diag::note_omp_collapse_ordered_expr)
9218 diag::note_omp_collapse_ordered_expr)
9223 assert(((For && For->getBody()) || (CXXFor && CXXFor->getBody())) &&
9229 OpenMPIterationSpaceChecker ISC(SemaRef, SupportsNonRectangular, DSA,
9230 For ? For->getForLoc() : CXXFor->getForLoc());
9233 Stmt *Init = For ? For->getInit() : CXXFor->getBeginStmt();
9234 if (ISC.checkAndSetInit(Init))
9237 bool HasErrors =
false;
9240 if (
ValueDecl *LCDecl = ISC.getLoopDecl()) {
9250 SemaRef.
Diag(Init->getBeginLoc(), diag::err_omp_loop_variable_type)
9264 VarsWithImplicitDSA.erase(LCDecl);
9269 HasErrors |= ISC.checkAndSetCond(For ? For->getCond() : CXXFor->getCond());
9272 HasErrors |= ISC.checkAndSetInc(For ? For->getInc() : CXXFor->getInc());
9279 ResultIterSpaces[CurrentNestedLoopCount].PreCond = ISC.buildPreCond(
9280 DSA.getCurScope(), For ? For->getCond() : CXXFor->getCond(), Captures);
9281 ResultIterSpaces[CurrentNestedLoopCount].NumIterations =
9282 ISC.buildNumIterations(DSA.getCurScope(), ResultIterSpaces,
9289 ResultIterSpaces[CurrentNestedLoopCount].CounterVar =
9290 ISC.buildCounterVar(Captures, DSA);
9291 ResultIterSpaces[CurrentNestedLoopCount].PrivateCounterVar =
9292 ISC.buildPrivateCounterVar();
9293 ResultIterSpaces[CurrentNestedLoopCount].CounterInit = ISC.buildCounterInit();
9294 ResultIterSpaces[CurrentNestedLoopCount].CounterStep = ISC.buildCounterStep();
9295 ResultIterSpaces[CurrentNestedLoopCount].InitSrcRange = ISC.getInitSrcRange();
9296 ResultIterSpaces[CurrentNestedLoopCount].CondSrcRange =
9297 ISC.getConditionSrcRange();
9298 ResultIterSpaces[CurrentNestedLoopCount].IncSrcRange =
9299 ISC.getIncrementSrcRange();
9300 ResultIterSpaces[CurrentNestedLoopCount].Subtract = ISC.shouldSubtractStep();
9301 ResultIterSpaces[CurrentNestedLoopCount].IsStrictCompare =
9302 ISC.isStrictTestOp();
9303 std::tie(ResultIterSpaces[CurrentNestedLoopCount].MinValue,
9304 ResultIterSpaces[CurrentNestedLoopCount].MaxValue) =
9305 ISC.buildMinMaxValues(DSA.getCurScope(), Captures);
9306 ResultIterSpaces[CurrentNestedLoopCount].FinalCondition =
9307 ISC.buildFinalCondition(DSA.getCurScope());
9308 ResultIterSpaces[CurrentNestedLoopCount].IsNonRectangularLB =
9309 ISC.doesInitDependOnLC();
9310 ResultIterSpaces[CurrentNestedLoopCount].IsNonRectangularUB =
9311 ISC.doesCondDependOnLC();
9312 ResultIterSpaces[CurrentNestedLoopCount].LoopDependentIdx =
9313 ISC.getLoopDependentIdx();
9316 (ResultIterSpaces[CurrentNestedLoopCount].PreCond ==
nullptr ||
9317 ResultIterSpaces[CurrentNestedLoopCount].NumIterations ==
nullptr ||
9318 ResultIterSpaces[CurrentNestedLoopCount].CounterVar ==
nullptr ||
9319 ResultIterSpaces[CurrentNestedLoopCount].PrivateCounterVar ==
nullptr ||
9320 ResultIterSpaces[CurrentNestedLoopCount].CounterInit ==
nullptr ||
9321 ResultIterSpaces[CurrentNestedLoopCount].CounterStep ==
nullptr);
9322 if (!HasErrors && DSA.isOrderedRegion()) {
9323 if (DSA.getOrderedRegionParam().second->getNumForLoops()) {
9324 if (CurrentNestedLoopCount <
9325 DSA.getOrderedRegionParam().second->getLoopNumIterations().size()) {
9326 DSA.getOrderedRegionParam().second->setLoopNumIterations(
9327 CurrentNestedLoopCount,
9328 ResultIterSpaces[CurrentNestedLoopCount].NumIterations);
9329 DSA.getOrderedRegionParam().second->setLoopCounter(
9330 CurrentNestedLoopCount,
9331 ResultIterSpaces[CurrentNestedLoopCount].CounterVar);
9334 for (
auto &Pair : DSA.getDoacrossDependClauses()) {
9335 if (CurrentNestedLoopCount >= Pair.first->getNumLoops()) {
9339 if (Pair.first->getDependencyKind() == OMPC_DEPEND_sink &&
9340 Pair.second.size() <= CurrentNestedLoopCount) {
9342 Pair.first->setLoopData(CurrentNestedLoopCount,
nullptr);
9346 if (Pair.first->getDependencyKind() == OMPC_DEPEND_source)
9347 CntValue = ISC.buildOrderedLoopData(
9349 ResultIterSpaces[CurrentNestedLoopCount].CounterVar, Captures,
9350 Pair.first->getDependencyLoc());
9352 CntValue = ISC.buildOrderedLoopData(
9354 ResultIterSpaces[CurrentNestedLoopCount].CounterVar, Captures,
9355 Pair.first->getDependencyLoc(),
9356 Pair.second[CurrentNestedLoopCount].first,
9357 Pair.second[CurrentNestedLoopCount].second);
9358 Pair.first->setLoopData(CurrentNestedLoopCount, CntValue);
9369 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
9373 : tryBuildCapture(SemaRef, Start.
get(), Captures);
9377 VarRef.
get()->getType())) {
9394 bool IsNonRectangularLB,
9395 llvm::MapVector<const Expr *, DeclRefExpr *> *Captures =
nullptr) {
9404 NewStep = tryBuildCapture(SemaRef, Step.
get(), *Captures);
9419 if (Captures && !IsNonRectangularLB)
9420 NewStart = tryBuildCapture(SemaRef, Start.
get(), *Captures);
9427 if (VarRef.
get()->getType()->isOverloadableType() ||
9428 NewStart.
get()->getType()->isOverloadableType() ||
9429 Update.get()->getType()->isOverloadableType()) {
9436 SemaRef.
BuildBinOp(S, Loc, Subtract ? BO_SubAssign : BO_AddAssign,
9437 VarRef.
get(), SavedUpdate.
get());
9448 NewStart.
get(), SavedUpdate.
get());
9453 VarRef.
get()->getType())) {
9472 unsigned HasBits = C.getTypeSize(OldType);
9473 if (HasBits >= Bits)
9476 QualType NewType = C.getIntTypeForBitwidth(Bits,
true);
9486 if (std::optional<llvm::APSInt> Result =
9488 return Signed ? Result->isSignedIntN(Bits) : Result->isIntN(Bits);
9495 if (!PreInits.empty()) {
9506 const llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
9507 if (!Captures.empty()) {
9509 for (
const auto &Pair : Captures)
9510 PreInits.push_back(Pair.second->getDecl());
9518 Expr *PostUpdate =
nullptr;
9519 if (!PostUpdates.empty()) {
9520 for (
Expr *E : PostUpdates) {
9526 PostUpdate = PostUpdate
9541 Expr *OrderedLoopCountExpr,
Stmt *AStmt,
Sema &SemaRef,
9545 unsigned NestedLoopCount = 1;
9546 bool SupportsNonPerfectlyNested = (SemaRef.
LangOpts.OpenMP >= 50) &&
9549 if (CollapseLoopCountExpr) {
9554 NestedLoopCount = Result.Val.getInt().getLimitedValue();
9560 unsigned OrderedLoopCount = 1;
9561 if (OrderedLoopCountExpr) {
9568 if (Result.getLimitedValue() < NestedLoopCount) {
9570 diag::err_omp_wrong_ordered_loop_count)
9573 diag::note_collapse_loop_count)
9576 OrderedLoopCount = Result.getLimitedValue();
9584 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
9585 unsigned NumLoops =
std::max(OrderedLoopCount, NestedLoopCount);
9589 SupportsNonPerfectlyNested, NumLoops,
9590 [DKind, &SemaRef, &DSA, NumLoops, NestedLoopCount,
9591 CollapseLoopCountExpr, OrderedLoopCountExpr, &VarsWithImplicitDSA,
9592 &IterSpaces, &Captures](
unsigned Cnt,
Stmt *CurStmt) {
9594 DKind, CurStmt, SemaRef, DSA, Cnt, NestedLoopCount,
9595 NumLoops, CollapseLoopCountExpr, OrderedLoopCountExpr,
9596 VarsWithImplicitDSA, IterSpaces, Captures))
9598 if (Cnt > 0 && Cnt >= NestedLoopCount &&
9599 IterSpaces[Cnt].CounterVar) {
9601 auto *DRE = cast<DeclRefExpr>(IterSpaces[Cnt].CounterVar);
9602 if (isa<OMPCapturedExprDecl>(DRE->getDecl())) {
9603 Captures[DRE] = DRE;
9609 Stmt *DependentPreInits = Transform->getPreInits();
9610 if (!DependentPreInits)
9612 for (
Decl *C : cast<DeclStmt>(DependentPreInits)->getDeclGroup()) {
9613 auto *D = cast<VarDecl>(C);
9615 Transform->getBeginLoc());
9616 Captures[Ref] = Ref;
9621 Built.
clear( NestedLoopCount);
9624 return NestedLoopCount;
9657 auto PreCond =
ExprResult(IterSpaces[0].PreCond);
9658 Expr *N0 = IterSpaces[0].NumIterations;
9662 .PerformImplicitConversion(
9677 return NestedLoopCount;
9680 bool AllCountsNeedLessThan32Bits = C.getTypeSize(N0->
getType()) < 32;
9682 Scope *CurScope = DSA.getCurScope();
9683 for (
unsigned Cnt = 1; Cnt < NestedLoopCount; ++Cnt) {
9684 if (PreCond.isUsable()) {
9686 SemaRef.
BuildBinOp(CurScope, PreCond.get()->getExprLoc(), BO_LAnd,
9687 PreCond.get(), IterSpaces[Cnt].PreCond);
9689 Expr *N = IterSpaces[Cnt].NumIterations;
9691 AllCountsNeedLessThan32Bits &= C.getTypeSize(N->
getType()) < 32;
9694 CurScope, Loc, BO_Mul, LastIteration32.
get(),
9702 CurScope, Loc, BO_Mul, LastIteration64.
get(),
9712 if (SemaRef.
getLangOpts().OpenMPOptimisticCollapse ||
9714 C.getTypeSize(LastIteration32.
get()->getType()) == 32 &&
9715 (AllCountsNeedLessThan32Bits || NestedLoopCount == 1 ||
9718 LastIteration32.
get()->getType()->hasSignedIntegerRepresentation(),
9719 LastIteration64.
get(), SemaRef))))
9720 LastIteration = LastIteration32;
9738 CurScope, LastIteration.
get()->getExprLoc(), BO_Sub,
9739 LastIteration.
get(),
9747 bool IsConstant = LastIteration.
get()->isIntegerConstantExpr(SemaRef.
Context);
9751 tryBuildCapture(SemaRef, LastIteration.
get(), Captures);
9752 LastIteration = SaveRef;
9756 CurScope, SaveRef.
get()->getExprLoc(), BO_Add, SaveRef.
get(),
9765 ExprResult LB, UB, IL, ST, EUB, CombLB, CombUB, PrevLB, PrevUB, CombEUB;
9794 buildVarDecl(SemaRef, InitLoc, StrideVType,
".omp.stride");
9803 UB.
get(), LastIteration.
get());
9805 LastIteration.
get()->getExprLoc(), InitLoc, IsUBGreater.
get(),
9806 LastIteration.
get(), UB.
get());
9807 EUB = SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, UB.
get(),
9832 CurScope, InitLoc, BO_GT, CombUB.
get(), LastIteration.
get());
9835 LastIteration.
get(), CombUB.
get());
9836 CombEUB = SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, CombUB.
get(),
9841 const CapturedDecl *CD = cast<CapturedStmt>(AStmt)->getCapturedDecl();
9845 "Unexpected number of parameters in loop combined directive");
9874 Init = SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, IV.
get(), RHS);
9886 SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, IV.
get(), CombRHS);
9892 bool UseStrictCompare =
9894 llvm::all_of(IterSpaces, [](
const LoopIterationSpace &LIS) {
9895 return LIS.IsStrictCompare;
9901 if (UseStrictCompare) {
9904 .
BuildBinOp(CurScope, CondLoc, BO_Add, BoundUB,
9916 UseStrictCompare ? BO_LT : BO_LE, IV.
get(),
9919 NumIterations.
get());
9922 CombDistCond = SemaRef.
BuildBinOp(CurScope, CondLoc, BO_LT, IV.
get(),
9923 NumIterations.
get());
9928 Expr *BoundCombUB = CombUB.
get();
9929 if (UseStrictCompare) {
9933 CurScope, CondLoc, BO_Add, BoundCombUB,
9941 SemaRef.
BuildBinOp(CurScope, CondLoc, UseStrictCompare ? BO_LT : BO_LE,
9942 IV.
get(), BoundCombUB);
9949 if (!Inc.isUsable())
9951 Inc = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Assign, IV.
get(), Inc.get());
9953 if (!Inc.isUsable())
9960 ExprResult NextLB, NextUB, CombNextLB, CombNextUB;
9993 CombNextLB = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Assign, CombLB.
get(),
10005 CombNextUB = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Assign, CombUB.
get(),
10019 ExprResult DistCond, DistInc, PrevEUB, ParForInDistCond;
10022 CurScope, CondLoc, UseStrictCompare ? BO_LT : BO_LE, IV.
get(), BoundUB);
10023 assert(DistCond.
isUsable() &&
"distribute cond expr was not built");
10027 assert(DistInc.
isUsable() &&
"distribute inc expr was not built");
10028 DistInc = SemaRef.
BuildBinOp(CurScope, DistIncLoc, BO_Assign, IV.
get(),
10032 assert(DistInc.
isUsable() &&
"distribute inc expr was not built");
10039 PrevUB.
get()->getType())) {
10043 DistEUBLoc, NewPrevUB.
get());
10048 UB.
get(), NewPrevUB.
get());
10050 DistEUBLoc, DistEUBLoc, IsUBGreater.
get(), NewPrevUB.
get(), UB.
get());
10051 PrevEUB = SemaRef.
BuildBinOp(CurScope, DistIncLoc, BO_Assign, UB.
get(),
10059 Expr *BoundPrevUB = PrevUB.
get();
10060 if (UseStrictCompare) {
10064 CurScope, CondLoc, BO_Add, BoundPrevUB,
10072 SemaRef.
BuildBinOp(CurScope, CondLoc, UseStrictCompare ? BO_LT : BO_LE,
10073 IV.
get(), BoundPrevUB);
10077 bool HasErrors =
false;
10078 Built.
Counters.resize(NestedLoopCount);
10079 Built.
Inits.resize(NestedLoopCount);
10080 Built.
Updates.resize(NestedLoopCount);
10081 Built.
Finals.resize(NestedLoopCount);
10102 for (
unsigned int Cnt = 0; Cnt < NestedLoopCount; ++Cnt) {
10103 LoopIterationSpace &IS = IterSpaces[Cnt];
10109 for (
unsigned int K = Cnt + 1; K < NestedLoopCount; ++K)
10110 Prod = SemaRef.
BuildBinOp(CurScope, UpdLoc, BO_Mul, Prod.
get(),
10111 IterSpaces[K].NumIterations);
10116 if (Cnt + 1 < NestedLoopCount)
10130 if (Cnt + 1 < NestedLoopCount)
10131 Prod = SemaRef.
BuildBinOp(CurScope, UpdLoc, BO_Mul, Iter.
get(),
10135 Acc = SemaRef.
BuildBinOp(CurScope, UpdLoc, BO_Sub, Acc.
get(), Prod.
get());
10138 auto *VD = cast<VarDecl>(cast<DeclRefExpr>(IS.CounterVar)->getDecl());
10140 SemaRef, VD, IS.CounterVar->getType(), IS.CounterVar->getExprLoc(),
10144 IS.CounterInit, IS.IsNonRectangularLB, Captures);
10145 if (!Init.isUsable()) {
10150 SemaRef, CurScope, UpdLoc, CounterVar, IS.CounterInit, Iter,
10151 IS.CounterStep, IS.Subtract, IS.IsNonRectangularLB, &Captures);
10152 if (!
Update.isUsable()) {
10160 IS.CounterInit, IS.NumIterations, IS.CounterStep,
10161 IS.Subtract, IS.IsNonRectangularLB, &Captures);
10162 if (!Final.isUsable()) {
10167 if (!
Update.isUsable() || !Final.isUsable()) {
10172 Built.
Counters[Cnt] = IS.CounterVar;
10174 Built.
Inits[Cnt] = Init.get();
10176 Built.
Finals[Cnt] = Final.get();
10180 if (IS.IsNonRectangularLB || IS.IsNonRectangularUB) {
10182 Built.
Counters[NestedLoopCount - 1 - IS.LoopDependentIdx];
10184 Built.
Inits[NestedLoopCount - 1 - IS.LoopDependentIdx];
10201 Built.
PreCond = PreCond.get();
10204 Built.
Init = Init.get();
10205 Built.
Inc = Inc.get();
10206 Built.
LB = LB.
get();
10207 Built.
UB = UB.
get();
10208 Built.
IL = IL.
get();
10209 Built.
ST = ST.
get();
10211 Built.
NLB = NextLB.
get();
10212 Built.
NUB = NextUB.
get();
10227 return NestedLoopCount;
10231 auto CollapseClauses =
10232 OMPExecutableDirective::getClausesOfKind<OMPCollapseClause>(Clauses);
10233 if (CollapseClauses.begin() != CollapseClauses.end())
10234 return (*CollapseClauses.begin())->getNumForLoops();
10239 auto OrderedClauses =
10240 OMPExecutableDirective::getClausesOfKind<OMPOrderedClause>(Clauses);
10241 if (OrderedClauses.begin() != OrderedClauses.end())
10242 return (*OrderedClauses.begin())->getNumForLoops();
10251 for (
const OMPClause *Clause : Clauses) {
10253 Safelen = cast<OMPSafelenClause>(Clause);
10255 Simdlen = cast<OMPSimdlenClause>(Clause);
10256 if (Safelen && Simdlen)
10260 if (Simdlen && Safelen) {
10280 if (SimdlenRes > SafelenRes) {
10282 diag::err_omp_wrong_simdlen_safelen_values)
10297 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10303 AStmt, *
this, *
DSAStack, VarsWithImplicitDSA, B);
10304 if (NestedLoopCount == 0)
10308 "omp simd loop exprs were not built");
10313 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
10326 Clauses, AStmt, B);
10336 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10342 AStmt, *
this, *
DSAStack, VarsWithImplicitDSA, B);
10343 if (NestedLoopCount == 0)
10347 "omp for loop exprs were not built");
10352 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
10362 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
10372 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10376 unsigned NestedLoopCount =
10379 VarsWithImplicitDSA, B);
10380 if (NestedLoopCount == 0)
10384 "omp for simd loop exprs were not built");
10389 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
10402 Clauses, AStmt, B);
10412 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10413 auto BaseStmt = AStmt;
10414 while (
auto *CS = dyn_cast_or_null<CapturedStmt>(BaseStmt))
10416 if (
auto *C = dyn_cast_or_null<CompoundStmt>(BaseStmt)) {
10417 auto S = C->children();
10418 if (S.begin() == S.end())
10422 for (
Stmt *SectionStmt : llvm::drop_begin(S)) {
10423 if (!SectionStmt || !isa<OMPSectionDirective>(SectionStmt)) {
10425 Diag(SectionStmt->getBeginLoc(),
10426 diag::err_omp_sections_substmt_not_section);
10429 cast<OMPSectionDirective>(SectionStmt)
10430 ->setHasCancel(
DSAStack->isCancelRegion());
10433 Diag(AStmt->
getBeginLoc(), diag::err_omp_sections_not_compound_stmt);
10440 DSAStack->getTaskgroupReductionRef(),
10459 if (
auto *CE = dyn_cast<CallExpr>(E))
10460 if (CE->getDirectCallee())
10472 Stmt *S = cast<CapturedStmt>(AStmt)->getCapturedStmt();
10482 Expr *TargetCall =
nullptr;
10484 auto *E = dyn_cast<Expr>(S);
10486 Diag(S->getBeginLoc(), diag::err_omp_dispatch_statement_call);
10492 if (
auto *BO = dyn_cast<BinaryOperator>(E)) {
10493 if (BO->getOpcode() == BO_Assign)
10496 if (
auto *COCE = dyn_cast<CXXOperatorCallExpr>(E))
10497 if (COCE->getOperator() == OO_Equal)
10517 DSAStackTy *Stack) {
10518 bool ErrorFound =
false;
10520 if (
auto *LPC = dyn_cast<OMPLastprivateClause>(C)) {
10521 for (
Expr *RefExpr : LPC->varlists()) {
10524 Expr *SimpleRefExpr = RefExpr;
10527 auto &&Info = Stack->isLoopControlVariable(D);
10529 S.
Diag(ELoc, diag::err_omp_lastprivate_loop_var_non_loop_iteration)
10530 << getOpenMPDirectiveName(K);
10552 auto *CS = cast<CapturedStmt>(AStmt);
10564 AStmt, *
this, *
DSAStack, VarsWithImplicitDSA, B);
10565 if (NestedLoopCount == 0)
10569 "omp loop exprs were not built");
10573 NestedLoopCount, Clauses, AStmt, B);
10588 auto *CS = cast<CapturedStmt>(AStmt);
10596 ThisCaptureLevel > 1; --ThisCaptureLevel) {
10608 unsigned NestedLoopCount =
10611 VarsWithImplicitDSA, B);
10612 if (NestedLoopCount == 0)
10616 "omp loop exprs were not built");
10619 DSAStack->setParentTeamsRegionLoc(StartLoc);
10622 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10638 auto *CS = cast<CapturedStmt>(AStmt);
10646 ThisCaptureLevel > 1; --ThisCaptureLevel) {
10658 unsigned NestedLoopCount =
10661 VarsWithImplicitDSA, B);
10662 if (NestedLoopCount == 0)
10666 "omp loop exprs were not built");
10671 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10686 auto *CS = cast<CapturedStmt>(AStmt);
10694 ThisCaptureLevel > 1; --ThisCaptureLevel) {
10706 unsigned NestedLoopCount =
10709 VarsWithImplicitDSA, B);
10710 if (NestedLoopCount == 0)
10714 "omp loop exprs were not built");
10719 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10735 auto *CS = cast<CapturedStmt>(AStmt);
10743 ThisCaptureLevel > 1; --ThisCaptureLevel) {
10755 unsigned NestedLoopCount =
10758 VarsWithImplicitDSA, B);
10759 if (NestedLoopCount == 0)
10763 "omp loop exprs were not built");
10768 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10778 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10785 const OMPClause *Copyprivate =
nullptr;
10786 for (
const OMPClause *Clause : Clauses) {
10790 Copyprivate = Clause;
10791 if (Copyprivate && Nowait) {
10793 diag::err_omp_single_copyprivate_with_nowait);
10831 bool ErrorFound =
false;
10834 bool DependentHint =
false;
10836 if (C->getClauseKind() == OMPC_hint) {
10838 Diag(C->getBeginLoc(), diag::err_omp_hint_clause_no_name);
10841 Expr *E = cast<OMPHintClause>(C)->getHint();
10844 DependentHint =
true;
10847 HintLoc = C->getBeginLoc();
10853 const auto Pair =
DSAStack->getCriticalWithHint(DirName);
10854 if (Pair.first && DirName.
getName() && !DependentHint) {
10855 if (llvm::APSInt::compareValues(Hint, Pair.second) != 0) {
10856 Diag(StartLoc, diag::err_omp_critical_with_hint);
10858 Diag(HintLoc, diag::note_omp_critical_hint_here)
10859 << 0 <<
toString(Hint, 10,
false);
10861 Diag(StartLoc, diag::note_omp_critical_no_hint) << 0;
10862 if (
const auto *C = Pair.first->getSingleClause<
OMPHintClause>()) {
10863 Diag(C->getBeginLoc(), diag::note_omp_critical_hint_here)
10868 Diag(Pair.first->getBeginLoc(), diag::note_omp_critical_no_hint) << 1;
10877 if (!Pair.first && DirName.
getName() && !DependentHint)
10878 DSAStack->addCriticalWithHint(Dir, Hint);
10888 auto *CS = cast<CapturedStmt>(AStmt);
10899 unsigned NestedLoopCount =
10902 VarsWithImplicitDSA, B);
10903 if (NestedLoopCount == 0)
10907 "omp parallel for loop exprs were not built");
10912 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
10922 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
10932 auto *CS = cast<CapturedStmt>(AStmt);
10943 unsigned NestedLoopCount =
10946 VarsWithImplicitDSA, B);
10947 if (NestedLoopCount == 0)
10953 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
10966 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10976 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
10977 auto *CS = cast<CapturedStmt>(AStmt);
10988 Context, StartLoc, EndLoc, Clauses, AStmt,
10989 DSAStack->getTaskgroupReductionRef());
10999 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
11000 auto *CS = cast<CapturedStmt>(AStmt);
11011 Context, StartLoc, EndLoc, Clauses, AStmt,
11012 DSAStack->getTaskgroupReductionRef());
11022 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
11023 auto BaseStmt = AStmt;
11024 while (
auto *CS = dyn_cast_or_null<CapturedStmt>(BaseStmt))
11026 if (
auto *C = dyn_cast_or_null<CompoundStmt>(BaseStmt)) {
11027 auto S = C->children();
11028 if (S.begin() == S.end())
11032 for (
Stmt *SectionStmt : llvm::drop_begin(S)) {
11033 if (!SectionStmt || !isa<OMPSectionDirective>(SectionStmt)) {
11035 Diag(SectionStmt->getBeginLoc(),
11036 diag::err_omp_parallel_sections_substmt_not_section);
11039 cast<OMPSectionDirective>(SectionStmt)
11040 ->setHasCancel(
DSAStack->isCancelRegion());
11044 diag::err_omp_parallel_sections_not_compound_stmt);
11051 Context, StartLoc, EndLoc, Clauses, AStmt,
11060 bool ErrorFound =
false;
11062 if (llvm::is_contained(MutuallyExclusiveClauses, C->getClauseKind())) {
11065 }
else if (PrevClause->
getClauseKind() != C->getClauseKind()) {
11066 S.
Diag(C->getBeginLoc(), diag::err_omp_clauses_mutually_exclusive)
11067 << getOpenMPClauseName(C->getClauseKind())
11088 {OMPC_detach, OMPC_mergeable}))
11091 auto *CS = cast<CapturedStmt>(AStmt);
11118 bool InExContext) {
11120 OMPExecutableDirective::getSingleClause<OMPAtClause>(Clauses);
11122 if (AtC && !InExContext && AtC->
getAtKind() == OMPC_AT_execution) {
11128 OMPExecutableDirective::getSingleClause<OMPSeverityClause>(Clauses);
11130 OMPExecutableDirective::getSingleClause<OMPMessageClause>(Clauses);
11133 if (!AtC || AtC->
getAtKind() == OMPC_AT_compilation) {
11134 if (SeverityC && SeverityC->
getSeverityKind() == OMPC_SEVERITY_warning)
11136 << (ME ? cast<StringLiteral>(ME)->getString() :
"WARNING");
11138 Diag(StartLoc, diag::err_diagnose_if_succeeded)
11139 << (ME ? cast<StringLiteral>(ME)->getString() :
"ERROR");
11140 if (!SeverityC || SeverityC->
getSeverityKind() != OMPC_SEVERITY_warning)
11150 OMPExecutableDirective::getSingleClause<OMPNowaitClause>(Clauses);
11152 !OMPExecutableDirective::getClausesOfKind<OMPDependClause>(Clauses)
11154 if (NowaitC && !HasDependC) {
11155 Diag(StartLoc, diag::err_omp_nowait_clause_without_depend);
11169 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
11175 DSAStack->getTaskgroupReductionRef());
11184 if (C->getClauseKind() == OMPC_flush)
11185 FC = cast<OMPFlushClause>(C);
11192 if (C->getClauseKind() == OMPC_acq_rel ||
11193 C->getClauseKind() == OMPC_acquire ||
11194 C->getClauseKind() == OMPC_release) {
11195 if (MemOrderKind != OMPC_unknown) {
11196 Diag(C->getBeginLoc(), diag::err_omp_several_mem_order_clauses)
11197 << getOpenMPDirectiveName(OMPD_flush) << 1
11198 <<
SourceRange(C->getBeginLoc(), C->getEndLoc());
11199 Diag(MemOrderLoc, diag::note_omp_previous_mem_order_clause)
11200 << getOpenMPClauseName(MemOrderKind);
11202 MemOrderKind = C->getClauseKind();
11203 MemOrderLoc = C->getBeginLoc();
11207 if (FC && OrderClause) {
11210 Diag(OrderClause->
getBeginLoc(), diag::note_omp_flush_order_clause_here)
11220 if (Clauses.empty()) {
11221 Diag(StartLoc, diag::err_omp_depobj_expected);
11223 }
else if (Clauses[0]->getClauseKind() != OMPC_depobj) {
11224 Diag(Clauses[0]->getBeginLoc(), diag::err_omp_depobj_expected);
11228 if (Clauses.size() > 2) {
11229 Diag(Clauses[2]->getBeginLoc(),
11230 diag::err_omp_depobj_single_clause_expected);
11232 }
else if (Clauses.size() < 1) {
11233 Diag(Clauses[0]->getEndLoc(), diag::err_omp_depobj_single_clause_expected);
11243 if (Clauses.size() != 1) {
11244 Diag(Clauses.empty() ? EndLoc : Clauses[1]->getBeginLoc(),
11245 diag::err_omp_scan_single_clause_expected);
11250 Scope *ParentS = S->getParent();
11253 return StmtError(
Diag(StartLoc, diag::err_omp_orphaned_device_directive)
11254 << getOpenMPDirectiveName(OMPD_scan) << 5);
11258 if (
DSAStack->doesParentHasScanDirective()) {
11259 Diag(StartLoc, diag::err_omp_several_directives_in_region) <<
"scan";
11261 diag::note_omp_previous_directive)
11265 DSAStack->setParentHasScanDirective(StartLoc);
11273 const OMPClause *DependFound =
nullptr;
11274 const OMPClause *DependSourceClause =
nullptr;
11275 const OMPClause *DependSinkClause =
nullptr;
11276 bool ErrorFound =
false;
11280 if (
auto *DC = dyn_cast<OMPDependClause>(C)) {
11282 if (DC->getDependencyKind() == OMPC_DEPEND_source) {
11283 if (DependSourceClause) {
11284 Diag(C->getBeginLoc(), diag::err_omp_more_one_clause)
11285 << getOpenMPDirectiveName(OMPD_ordered)
11286 << getOpenMPClauseName(OMPC_depend) << 2;
11289 DependSourceClause = C;
11291 if (DependSinkClause) {
11292 Diag(C->getBeginLoc(), diag::err_omp_depend_sink_source_not_allowed)
11296 }
else if (DC->getDependencyKind() == OMPC_DEPEND_sink) {
11297 if (DependSourceClause) {
11298 Diag(C->getBeginLoc(), diag::err_omp_depend_sink_source_not_allowed)
11302 DependSinkClause = C;
11304 }
else if (C->getClauseKind() == OMPC_threads) {
11305 TC = cast<OMPThreadsClause>(C);
11306 }
else if (C->getClauseKind() == OMPC_simd) {
11307 SC = cast<OMPSIMDClause>(C);
11310 if (!ErrorFound && !SC &&
11315 Diag(StartLoc, diag::err_omp_prohibited_region_simd)
11316 << (
LangOpts.OpenMP >= 50 ? 1 : 0);
11318 }
else if (DependFound && (TC || SC)) {
11319 Diag(DependFound->
getBeginLoc(), diag::err_omp_depend_clause_thread_simd)
11322 }
else if (DependFound && !
DSAStack->getParentOrderedRegionParam().first) {
11324 diag::err_omp_ordered_directive_without_param);
11326 }
else if (TC || Clauses.empty()) {
11327 if (
const Expr *Param =
DSAStack->getParentOrderedRegionParam().first) {
11329 Diag(ErrLoc, diag::err_omp_ordered_directive_with_param)
11330 << (TC !=
nullptr);
11335 if ((!AStmt && !DependFound) || ErrorFound)
11343 if (!DependFound) {
11344 if (
DSAStack->doesParentHasOrderedDirective()) {
11345 Diag(StartLoc, diag::err_omp_several_directives_in_region) <<
"ordered";
11347 diag::note_omp_previous_directive)
11351 DSAStack->setParentHasOrderedDirective(StartLoc);
11355 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
11366 class OpenMPAtomicUpdateChecker {
11368 enum ExprAnalysisErrorCode {
11372 NotABinaryOrUnaryExpression,
11374 NotAnUnaryIncDecExpression,
11380 NotABinaryExpression,
11383 NotABinaryOperator,
11386 NotAnUpdateExpression,
11404 bool IsXLHSInRHSPart;
11409 bool IsPostfixUpdate;
11412 OpenMPAtomicUpdateChecker(
Sema &SemaRef)
11413 : SemaRef(SemaRef),
X(nullptr), E(nullptr), UpdateExpr(nullptr),
11414 IsXLHSInRHSPart(
false), Op(BO_PtrMemD), IsPostfixUpdate(
false) {}
11422 bool checkStatement(
Stmt *S,
unsigned DiagId = 0,
unsigned NoteId = 0);
11424 Expr *getX()
const {
return X; }
11426 Expr *getExpr()
const {
return E; }
11430 Expr *getUpdateExpr()
const {
return UpdateExpr; }
11433 bool isXLHSInRHSPart()
const {
return IsXLHSInRHSPart; }
11437 bool isPostfixUpdate()
const {
return IsPostfixUpdate; }
11440 bool checkBinaryOperation(
BinaryOperator *AtomicBinOp,
unsigned DiagId = 0,
11441 unsigned NoteId = 0);
11444 bool OpenMPAtomicUpdateChecker::checkBinaryOperation(
11445 BinaryOperator *AtomicBinOp,
unsigned DiagId,
unsigned NoteId) {
11446 ExprAnalysisErrorCode ErrorFound = NoError;
11452 if (AtomicBinOp->
getOpcode() == BO_Assign) {
11454 if (
const auto *AtomicInnerBinOp = dyn_cast<BinaryOperator>(
11456 if (AtomicInnerBinOp->isMultiplicativeOp() ||
11457 AtomicInnerBinOp->isAdditiveOp() || AtomicInnerBinOp->isShiftOp() ||
11458 AtomicInnerBinOp->isBitwiseOp()) {
11459 Op = AtomicInnerBinOp->getOpcode();
11460 OpLoc = AtomicInnerBinOp->getOperatorLoc();
11461 Expr *LHS = AtomicInnerBinOp->getLHS();
11462 Expr *RHS = AtomicInnerBinOp->getRHS();
11463 llvm::FoldingSetNodeID XId, LHSId, RHSId;
11470 if (XId == LHSId) {
11472 IsXLHSInRHSPart =
true;
11473 }
else if (XId == RHSId) {
11475 IsXLHSInRHSPart =
false;
11477 ErrorLoc = AtomicInnerBinOp->getExprLoc();
11478 ErrorRange = AtomicInnerBinOp->getSourceRange();
11479 NoteLoc =
X->getExprLoc();
11480 NoteRange =
X->getSourceRange();
11481 ErrorFound = NotAnUpdateExpression;
11484 ErrorLoc = AtomicInnerBinOp->getExprLoc();
11485 ErrorRange = AtomicInnerBinOp->getSourceRange();
11486 NoteLoc = AtomicInnerBinOp->getOperatorLoc();
11488 ErrorFound = NotABinaryOperator;
11493 ErrorFound = NotABinaryExpression;
11500 ErrorFound = NotAnAssignmentOp;
11502 if (ErrorFound != NoError && DiagId != 0 && NoteId != 0) {
11503 SemaRef.
Diag(ErrorLoc, DiagId) << ErrorRange;
11504 SemaRef.
Diag(NoteLoc, NoteId) << ErrorFound << NoteRange;
11508 E =
X = UpdateExpr =
nullptr;
11509 return ErrorFound != NoError;
11512 bool OpenMPAtomicUpdateChecker::checkStatement(
Stmt *S,
unsigned DiagId,
11514 ExprAnalysisErrorCode ErrorFound = NoError;
11525 if (
auto *AtomicBody = dyn_cast<Expr>(S)) {
11526 AtomicBody = AtomicBody->IgnoreParenImpCasts();
11527 if (AtomicBody->getType()->isScalarType() ||
11528 AtomicBody->isInstantiationDependent()) {
11529 if (
const auto *AtomicCompAssignOp = dyn_cast<CompoundAssignOperator>(
11530 AtomicBody->IgnoreParenImpCasts())) {
11533 AtomicCompAssignOp->getOpcode());
11534 OpLoc = AtomicCompAssignOp->getOperatorLoc();
11535 E = AtomicCompAssignOp->getRHS();
11536 X = AtomicCompAssignOp->getLHS()->IgnoreParens();
11537 IsXLHSInRHSPart =
true;
11538 }
else if (
auto *AtomicBinOp = dyn_cast<BinaryOperator>(
11539 AtomicBody->IgnoreParenImpCasts())) {
11541 if (checkBinaryOperation(AtomicBinOp, DiagId, NoteId))
11543 }
else if (
const auto *AtomicUnaryOp = dyn_cast<UnaryOperator>(
11544 AtomicBody->IgnoreParenImpCasts())) {
11546 if (AtomicUnaryOp->isIncrementDecrementOp()) {
11547 IsPostfixUpdate = AtomicUnaryOp->isPostfix();
11548 Op = AtomicUnaryOp->isIncrementOp() ? BO_Add : BO_Sub;
11549 OpLoc = AtomicUnaryOp->getOperatorLoc();
11550 X = AtomicUnaryOp->getSubExpr()->IgnoreParens();
11552 IsXLHSInRHSPart =
true;
11554 ErrorFound = NotAnUnaryIncDecExpression;
11555 ErrorLoc = AtomicUnaryOp->getExprLoc();
11556 ErrorRange = AtomicUnaryOp->getSourceRange();
11557 NoteLoc = AtomicUnaryOp->getOperatorLoc();
11560 }
else if (!AtomicBody->isInstantiationDependent()) {
11561 ErrorFound = NotABinaryOrUnaryExpression;
11562 NoteLoc = ErrorLoc = AtomicBody->getExprLoc();
11563 NoteRange = ErrorRange = AtomicBody->getSourceRange();
11566 ErrorFound = NotAScalarType;
11567 NoteLoc = ErrorLoc = AtomicBody->getBeginLoc();
11568 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
11571 ErrorFound = NotAnExpression;
11572 NoteLoc = ErrorLoc = S->getBeginLoc();
11573 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
11575 if (ErrorFound != NoError && DiagId != 0 && NoteId != 0) {
11576 SemaRef.
Diag(ErrorLoc, DiagId) << ErrorRange;
11577 SemaRef.
Diag(NoteLoc, NoteId) << ErrorFound << NoteRange;
11581 E =
X = UpdateExpr =
nullptr;
11582 if (ErrorFound == NoError && E &&
X) {
11592 IsXLHSInRHSPart ? OVEExpr : OVEX);
11599 UpdateExpr =
Update.get();
11601 return ErrorFound != NoError;
11605 llvm::FoldingSetNodeID getNodeId(
ASTContext &Context,
const Expr *S) {
11606 llvm::FoldingSetNodeID
Id;
11607 S->IgnoreParenImpCasts()->Profile(
Id, Context,
true);
11612 bool checkIfTwoExprsAreSame(
ASTContext &Context,
const Expr *LHS,
11614 return getNodeId(Context, LHS) == getNodeId(Context, RHS);
11617 class OpenMPAtomicCompareChecker {
11664 struct ErrorInfoTy {
11672 OpenMPAtomicCompareChecker(
Sema &S) : ContextRef(S.getASTContext()) {}
11675 bool checkStmt(
Stmt *S, ErrorInfoTy &ErrorInfo);
11677 Expr *getX()
const {
return X; }
11678 Expr *getE()
const {
return E; }
11679 Expr *getD()
const {
return D; }
11680 Expr *getCond()
const {
return C; }
11681 bool isXBinopExpr()
const {
return IsXBinopExpr; }
11700 bool IsXBinopExpr =
true;
11703 bool checkCondUpdateStmt(
IfStmt *S, ErrorInfoTy &ErrorInfo);
11706 bool checkCondExprStmt(
Stmt *S, ErrorInfoTy &ErrorInfo);
11709 bool checkType(ErrorInfoTy &ErrorInfo)
const;
11711 static bool CheckValue(
const Expr *E, ErrorInfoTy &ErrorInfo,
11712 bool ShouldBeLValue,
bool ShouldBeInteger =
false) {
11716 if (ShouldBeLValue && !E->
isLValue()) {
11717 ErrorInfo.Error = ErrorTy::XNotLValue;
11718 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = E->
getExprLoc();
11719 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = E->
getSourceRange();
11725 ErrorInfo.Error = ErrorTy::NotScalar;
11726 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = E->
getExprLoc();
11727 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = E->
getSourceRange();
11731 ErrorInfo.Error = ErrorTy::NotInteger;
11732 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = E->
getExprLoc();
11733 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = E->
getSourceRange();
11741 bool OpenMPAtomicCompareChecker::checkCondUpdateStmt(
IfStmt *S,
11742 ErrorInfoTy &ErrorInfo) {
11743 auto *Then = S->getThen();
11744 if (
auto *CS = dyn_cast<CompoundStmt>(Then)) {
11745 if (CS->body_empty()) {
11746 ErrorInfo.Error = ErrorTy::NoStmt;
11747 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11748 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11751 if (CS->size() > 1) {
11752 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
11753 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11754 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
11757 Then = CS->body_front();
11760 auto *BO = dyn_cast<BinaryOperator>(Then);
11762 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11763 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Then->getBeginLoc();
11764 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Then->getSourceRange();
11767 if (BO->getOpcode() != BO_Assign) {
11768 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11769 ErrorInfo.ErrorLoc = BO->getExprLoc();
11770 ErrorInfo.NoteLoc = BO->getOperatorLoc();
11771 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
11777 auto *Cond = dyn_cast<BinaryOperator>(S->getCond());
11779 ErrorInfo.Error = ErrorTy::NotABinaryOp;
11780 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getCond()->getExprLoc();
11781 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getCond()->getSourceRange();
11785 switch (Cond->getOpcode()) {
11789 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS())) {
11790 E = Cond->getRHS();
11791 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
11792 E = Cond->getLHS();
11794 ErrorInfo.Error = ErrorTy::InvalidComparison;
11795 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11796 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11804 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS()) &&
11805 checkIfTwoExprsAreSame(ContextRef, E, Cond->getRHS())) {
11807 }
else if (checkIfTwoExprsAreSame(ContextRef, E, Cond->getLHS()) &&
11808 checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
11810 IsXBinopExpr =
false;
11812 ErrorInfo.Error = ErrorTy::InvalidComparison;
11813 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11814 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11820 ErrorInfo.Error = ErrorTy::InvalidBinaryOp;
11821 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11822 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11826 if (S->getElse()) {
11827 ErrorInfo.Error = ErrorTy::UnexpectedElse;
11828 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getElse()->getBeginLoc();
11829 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getElse()->getSourceRange();
11836 bool OpenMPAtomicCompareChecker::checkCondExprStmt(
Stmt *S,
11837 ErrorInfoTy &ErrorInfo) {
11838 auto *BO = dyn_cast<BinaryOperator>(S);
11840 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11841 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getBeginLoc();
11842 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
11845 if (BO->getOpcode() != BO_Assign) {
11846 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11847 ErrorInfo.ErrorLoc = BO->getExprLoc();
11848 ErrorInfo.NoteLoc = BO->getOperatorLoc();
11849 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
11855 auto *CO = dyn_cast<ConditionalOperator>(BO->getRHS()->IgnoreParenImpCasts());
11857 ErrorInfo.Error = ErrorTy::NotCondOp;
11858 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = BO->getRHS()->getExprLoc();
11859 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getRHS()->getSourceRange();
11863 if (!checkIfTwoExprsAreSame(ContextRef,
X, CO->getFalseExpr())) {
11864 ErrorInfo.Error = ErrorTy::WrongFalseExpr;
11865 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CO->getFalseExpr()->getExprLoc();
11866 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
11867 CO->getFalseExpr()->getSourceRange();
11871 auto *Cond = dyn_cast<BinaryOperator>(CO->getCond());
11873 ErrorInfo.Error = ErrorTy::NotABinaryOp;
11874 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CO->getCond()->getExprLoc();
11875 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
11876 CO->getCond()->getSourceRange();
11880 switch (Cond->getOpcode()) {
11883 D = CO->getTrueExpr();
11884 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS())) {
11885 E = Cond->getRHS();
11886 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
11887 E = Cond->getLHS();
11889 ErrorInfo.Error = ErrorTy::InvalidComparison;
11890 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11891 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11898 E = CO->getTrueExpr();
11899 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS()) &&
11900 checkIfTwoExprsAreSame(ContextRef, E, Cond->getRHS())) {
11902 }
else if (checkIfTwoExprsAreSame(ContextRef, E, Cond->getLHS()) &&
11903 checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
11905 IsXBinopExpr =
false;
11907 ErrorInfo.Error = ErrorTy::InvalidComparison;
11908 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11909 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11915 ErrorInfo.Error = ErrorTy::InvalidBinaryOp;
11916 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
11917 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
11924 bool OpenMPAtomicCompareChecker::checkType(ErrorInfoTy &ErrorInfo)
const {
11926 assert(
X && E &&
"X and E cannot be nullptr");
11928 if (!CheckValue(
X, ErrorInfo,
true))
11931 if (!CheckValue(E, ErrorInfo,
false))
11934 if (D && !CheckValue(D, ErrorInfo,
false))
11940 bool OpenMPAtomicCompareChecker::checkStmt(
11941 Stmt *S, OpenMPAtomicCompareChecker::ErrorInfoTy &ErrorInfo) {
11942 auto *CS = dyn_cast<CompoundStmt>(S);
11944 if (CS->body_empty()) {
11945 ErrorInfo.Error = ErrorTy::NoStmt;
11946 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11947 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11951 if (CS->size() != 1) {
11952 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
11953 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11954 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11957 S = CS->body_front();
11962 if (
auto *IS = dyn_cast<IfStmt>(S)) {
11968 Res = checkCondUpdateStmt(IS, ErrorInfo);
11974 Res = checkCondExprStmt(S, ErrorInfo);
11980 return checkType(ErrorInfo);
11983 class OpenMPAtomicCompareCaptureChecker final
11984 :
public OpenMPAtomicCompareChecker {
11986 OpenMPAtomicCompareCaptureChecker(
Sema &S) : OpenMPAtomicCompareChecker(S) {}
11988 Expr *getV()
const {
return V; }
11989 Expr *getR()
const {
return R; }
11990 bool isFailOnly()
const {
return IsFailOnly; }
11991 bool isPostfixUpdate()
const {
return IsPostfixUpdate; }
11994 bool checkStmt(
Stmt *S, ErrorInfoTy &ErrorInfo);
11997 bool checkType(ErrorInfoTy &ErrorInfo);
12009 bool checkForm3(
IfStmt *S, ErrorInfoTy &ErrorInfo);
12013 bool checkForm45(
Stmt *S, ErrorInfoTy &ErrorInfo);
12020 bool IsFailOnly =
false;
12022 bool IsPostfixUpdate =
false;
12025 bool OpenMPAtomicCompareCaptureChecker::checkType(ErrorInfoTy &ErrorInfo) {
12026 if (!OpenMPAtomicCompareChecker::checkType(ErrorInfo))
12029 if (
V && !CheckValue(
V, ErrorInfo,
true))
12032 if (R && !CheckValue(R, ErrorInfo,
true,
true))
12038 bool OpenMPAtomicCompareCaptureChecker::checkForm3(
IfStmt *S,
12039 ErrorInfoTy &ErrorInfo) {
12042 auto *Then = S->getThen();
12043 if (
auto *CS = dyn_cast<CompoundStmt>(Then)) {
12044 if (CS->body_empty()) {
12045 ErrorInfo.Error = ErrorTy::NoStmt;
12046 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12047 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12050 if (CS->size() > 1) {
12051 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
12052 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12053 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12056 Then = CS->body_front();
12059 auto *BO = dyn_cast<BinaryOperator>(Then);
12061 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12062 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Then->getBeginLoc();
12063 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Then->getSourceRange();
12066 if (BO->getOpcode() != BO_Assign) {
12067 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12068 ErrorInfo.ErrorLoc = BO->getExprLoc();
12069 ErrorInfo.NoteLoc = BO->getOperatorLoc();
12070 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
12077 auto *Cond = dyn_cast<BinaryOperator>(S->getCond());
12079 ErrorInfo.Error = ErrorTy::NotABinaryOp;
12080 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getCond()->getExprLoc();
12081 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getCond()->getSourceRange();
12084 if (Cond->getOpcode() != BO_EQ) {
12085 ErrorInfo.Error = ErrorTy::NotEQ;
12086 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
12087 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
12091 if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getLHS())) {
12092 E = Cond->getRHS();
12093 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, Cond->getRHS())) {
12094 E = Cond->getLHS();
12096 ErrorInfo.Error = ErrorTy::InvalidComparison;
12097 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Cond->
getExprLoc();
12098 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Cond->
getSourceRange();
12104 if (!S->getElse()) {
12105 ErrorInfo.Error = ErrorTy::NoElse;
12106 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getBeginLoc();
12107 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
12111 auto *Else = S->getElse();
12112 if (
auto *CS = dyn_cast<CompoundStmt>(Else)) {
12113 if (CS->body_empty()) {
12114 ErrorInfo.Error = ErrorTy::NoStmt;
12115 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12116 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12119 if (CS->size() > 1) {
12120 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
12121 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12122 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
12125 Else = CS->body_front();
12128 auto *ElseBO = dyn_cast<BinaryOperator>(Else);
12130 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12131 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Else->getBeginLoc();
12132 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Else->getSourceRange();
12135 if (ElseBO->getOpcode() != BO_Assign) {
12136 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12137 ErrorInfo.ErrorLoc = ElseBO->getExprLoc();
12138 ErrorInfo.NoteLoc = ElseBO->getOperatorLoc();
12139 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseBO->getSourceRange();
12143 if (!checkIfTwoExprsAreSame(ContextRef,
X, ElseBO->getRHS())) {
12144 ErrorInfo.Error = ErrorTy::InvalidAssignment;
12145 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ElseBO->getRHS()->getExprLoc();
12146 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12147 ElseBO->getRHS()->getSourceRange();
12151 V = ElseBO->getLHS();
12153 return checkType(ErrorInfo);
12156 bool OpenMPAtomicCompareCaptureChecker::checkForm45(
Stmt *S,
12157 ErrorInfoTy &ErrorInfo) {
12160 auto *CS = cast<CompoundStmt>(S);
12161 assert(CS->size() == 2 &&
"CompoundStmt size is not expected");
12162 auto *S1 = cast<BinaryOperator>(CS->body_front());
12163 auto *S2 = cast<IfStmt>(CS->body_back());
12164 assert(S1->getOpcode() == BO_Assign &&
"unexpected binary operator");
12166 if (!checkIfTwoExprsAreSame(ContextRef, S1->getLHS(), S2->getCond())) {
12167 ErrorInfo.Error = ErrorTy::InvalidCondition;
12168 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S2->getCond()->getExprLoc();
12169 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S1->getLHS()->getSourceRange();
12175 auto *Then = S2->getThen();
12176 if (
auto *ThenCS = dyn_cast<CompoundStmt>(Then)) {
12177 if (ThenCS->body_empty()) {
12178 ErrorInfo.Error = ErrorTy::NoStmt;
12179 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ThenCS->getBeginLoc();
12180 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ThenCS->getSourceRange();
12183 if (ThenCS->size() > 1) {
12184 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
12185 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ThenCS->getBeginLoc();
12186 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ThenCS->getSourceRange();
12189 Then = ThenCS->body_front();
12192 auto *ThenBO = dyn_cast<BinaryOperator>(Then);
12194 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12195 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S2->getBeginLoc();
12196 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S2->getSourceRange();
12199 if (ThenBO->getOpcode() != BO_Assign) {
12200 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12201 ErrorInfo.ErrorLoc = ThenBO->getExprLoc();
12202 ErrorInfo.NoteLoc = ThenBO->getOperatorLoc();
12203 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ThenBO->getSourceRange();
12207 X = ThenBO->getLHS();
12208 D = ThenBO->getRHS();
12210 auto *BO = cast<BinaryOperator>(S1->getRHS()->IgnoreImpCasts());
12211 if (BO->getOpcode() != BO_EQ) {
12212 ErrorInfo.Error = ErrorTy::NotEQ;
12213 ErrorInfo.ErrorLoc = BO->getExprLoc();
12214 ErrorInfo.NoteLoc = BO->getOperatorLoc();
12215 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
12221 if (checkIfTwoExprsAreSame(ContextRef,
X, BO->getLHS())) {
12223 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, BO->getRHS())) {
12226 ErrorInfo.Error = ErrorTy::InvalidComparison;
12227 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = BO->getExprLoc();
12228 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
12232 if (S2->getElse()) {
12235 auto *Else = S2->getElse();
12236 if (
auto *ElseCS = dyn_cast<CompoundStmt>(Else)) {
12237 if (ElseCS->body_empty()) {
12238 ErrorInfo.Error = ErrorTy::NoStmt;
12239 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ElseCS->getBeginLoc();
12240 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseCS->getSourceRange();
12243 if (ElseCS->size() > 1) {
12244 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
12245 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ElseCS->getBeginLoc();
12246 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseCS->getSourceRange();
12249 Else = ElseCS->body_front();
12252 auto *ElseBO = dyn_cast<BinaryOperator>(Else);
12254 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12255 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Else->getBeginLoc();
12256 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Else->getSourceRange();
12259 if (ElseBO->getOpcode() != BO_Assign) {
12260 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12261 ErrorInfo.ErrorLoc = ElseBO->getExprLoc();
12262 ErrorInfo.NoteLoc = ElseBO->getOperatorLoc();
12263 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseBO->getSourceRange();
12266 if (!checkIfTwoExprsAreSame(ContextRef,
X, ElseBO->getRHS())) {
12267 ErrorInfo.Error = ErrorTy::InvalidAssignment;
12268 ErrorInfo.ErrorLoc = ElseBO->getRHS()->getExprLoc();
12269 ErrorInfo.NoteLoc =
X->getExprLoc();
12270 ErrorInfo.ErrorRange = ElseBO->getRHS()->getSourceRange();
12271 ErrorInfo.NoteRange =
X->getSourceRange();
12275 V = ElseBO->getLHS();
12278 return checkType(ErrorInfo);
12281 bool OpenMPAtomicCompareCaptureChecker::checkStmt(
Stmt *S,
12282 ErrorInfoTy &ErrorInfo) {
12284 if (
auto *IS = dyn_cast<IfStmt>(S))
12285 return checkForm3(IS, ErrorInfo);
12287 auto *CS = dyn_cast<CompoundStmt>(S);
12289 ErrorInfo.Error = ErrorTy::NotCompoundStmt;
12290 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->getBeginLoc();
12291 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->getSourceRange();
12294 if (CS->body_empty()) {
12295 ErrorInfo.Error = ErrorTy::NoStmt;
12296 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12297 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12302 if (CS->size() == 1) {
12303 auto *IS = dyn_cast<IfStmt>(CS->body_front());
12305 ErrorInfo.Error = ErrorTy::NotIfStmt;
12306 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->body_front()->
getBeginLoc();
12307 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12312 return checkForm3(IS, ErrorInfo);
12313 }
else if (CS->size() == 2) {
12314 auto *S1 = CS->body_front();
12315 auto *S2 = CS->body_back();
12317 Stmt *UpdateStmt =
nullptr;
12318 Stmt *CondUpdateStmt =
nullptr;
12319 Stmt *CondExprStmt =
nullptr;
12321 if (
auto *BO = dyn_cast<BinaryOperator>(S1)) {
12327 if (isa<BinaryOperator>(BO->getRHS()->IgnoreImpCasts()) ||
12328 isa<ConditionalOperator>(BO->getRHS()->IgnoreImpCasts())) {
12330 if (isa<IfStmt>(S2))
12331 return checkForm45(CS, ErrorInfo);
12336 IsPostfixUpdate =
true;
12338 if (isa<IfStmt>(S2)) {
12340 CondUpdateStmt = S2;
12349 CondUpdateStmt = S1;
12352 auto CheckCondUpdateStmt = [
this, &ErrorInfo](
Stmt *CUS) {
12353 auto *IS = dyn_cast<IfStmt>(CUS);
12355 ErrorInfo.Error = ErrorTy::NotIfStmt;
12356 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CUS->getBeginLoc();
12357 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CUS->getSourceRange();
12361 return checkCondUpdateStmt(IS, ErrorInfo);
12365 auto CheckUpdateStmt = [
this, &ErrorInfo](
Stmt *US) {
12366 auto *BO = dyn_cast<BinaryOperator>(US);
12368 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12369 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = US->getBeginLoc();
12370 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = US->getSourceRange();
12373 if (BO->getOpcode() != BO_Assign) {
12374 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12375 ErrorInfo.ErrorLoc = BO->getExprLoc();
12376 ErrorInfo.NoteLoc = BO->getOperatorLoc();
12377 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
12380 if (!checkIfTwoExprsAreSame(ContextRef, this->X, BO->getRHS())) {
12381 ErrorInfo.Error = ErrorTy::InvalidAssignment;
12382 ErrorInfo.ErrorLoc = BO->getRHS()->getExprLoc();
12384 ErrorInfo.ErrorRange = BO->getRHS()->getSourceRange();
12389 this->
V = BO->getLHS();
12394 if (CondUpdateStmt && !CheckCondUpdateStmt(CondUpdateStmt))
12396 if (CondExprStmt && !checkCondExprStmt(CondExprStmt, ErrorInfo))
12398 if (!CheckUpdateStmt(UpdateStmt))
12401 ErrorInfo.Error = ErrorTy::MoreThanTwoStmts;
12402 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12403 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12407 return checkType(ErrorInfo);
12416 DSAStack->addAtomicDirectiveLoc(StartLoc);
12429 bool MutexClauseEncountered =
false;
12430 llvm::SmallSet<OpenMPClauseKind, 2> EncounteredAtomicKinds;
12432 switch (C->getClauseKind()) {
12436 MutexClauseEncountered =
true;
12439 case OMPC_compare: {
12440 if (AtomicKind != OMPC_unknown && MutexClauseEncountered) {
12441 Diag(C->getBeginLoc(), diag::err_omp_atomic_several_clauses)
12442 <<
SourceRange(C->getBeginLoc(), C->getEndLoc());
12443 Diag(AtomicKindLoc, diag::note_omp_previous_mem_order_clause)
12444 << getOpenMPClauseName(AtomicKind);
12446 AtomicKind = C->getClauseKind();
12447 AtomicKindLoc = C->getBeginLoc();
12448 if (!EncounteredAtomicKinds.insert(C->getClauseKind()).second) {
12449 Diag(C->getBeginLoc(), diag::err_omp_atomic_several_clauses)
12450 <<
SourceRange(C->getBeginLoc(), C->getEndLoc());
12451 Diag(AtomicKindLoc, diag::note_omp_previous_mem_order_clause)
12452 << getOpenMPClauseName(AtomicKind);
12461 case OMPC_relaxed: {
12462 if (MemOrderKind != OMPC_unknown) {
12463 Diag(C->getBeginLoc(), diag::err_omp_several_mem_order_clauses)
12464 << getOpenMPDirectiveName(OMPD_atomic) << 0
12465 <<
SourceRange(C->getBeginLoc(), C->getEndLoc());
12466 Diag(MemOrderLoc, diag::note_omp_previous_mem_order_clause)
12467 << getOpenMPClauseName(MemOrderKind);
12469 MemOrderKind = C->getClauseKind();
12470 MemOrderLoc = C->getBeginLoc();
12478 llvm_unreachable(
"unknown clause is encountered");
12481 bool IsCompareCapture =
false;
12482 if (EncounteredAtomicKinds.contains(OMPC_compare) &&
12483 EncounteredAtomicKinds.contains(OMPC_capture)) {
12484 IsCompareCapture =
true;
12485 AtomicKind = OMPC_compare;
12494 if ((AtomicKind == OMPC_read &&
12495 (MemOrderKind == OMPC_acq_rel || MemOrderKind == OMPC_release)) ||
12496 ((AtomicKind == OMPC_write || AtomicKind == OMPC_update ||
12497 AtomicKind == OMPC_unknown) &&
12498 (MemOrderKind == OMPC_acq_rel || MemOrderKind == OMPC_acquire))) {
12500 if (AtomicKind == OMPC_unknown)
12502 Diag(Loc, diag::err_omp_atomic_incompatible_mem_order_clause)
12503 << getOpenMPClauseName(AtomicKind)
12504 << (AtomicKind == OMPC_unknown ? 1 : 0)
12505 << getOpenMPClauseName(MemOrderKind);
12506 Diag(MemOrderLoc, diag::note_omp_previous_mem_order_clause)
12507 << getOpenMPClauseName(MemOrderKind);
12510 Stmt *Body = AStmt;
12511 if (
auto *EWC = dyn_cast<ExprWithCleanups>(Body))
12512 Body = EWC->getSubExpr();
12517 Expr *UE =
nullptr;
12519 Expr *CE =
nullptr;
12521 bool IsXLHSInRHSPart =
false;
12522 bool IsPostfixUpdate =
false;
12523 bool IsFailOnly =
false;
12546 if (AtomicKind == OMPC_read) {
12553 } ErrorFound = NoError;
12558 if (
const auto *AtomicBody = dyn_cast<Expr>(Body)) {
12559 const auto *AtomicBinOp =
12560 dyn_cast<BinaryOperator>(AtomicBody->IgnoreParenImpCasts());
12561 if (AtomicBinOp && AtomicBinOp->
getOpcode() == BO_Assign) {
12564 if ((
X->isInstantiationDependent() ||
X->getType()->isScalarType()) &&
12565 (
V->isInstantiationDependent() ||
V->getType()->isScalarType())) {
12566 if (!
X->isLValue() || !
V->isLValue()) {
12567 const Expr *NotLValueExpr =
X->isLValue() ?
V :
X;
12568 ErrorFound = NotAnLValue;
12574 }
else if (!
X->isInstantiationDependent() ||
12575 !
V->isInstantiationDependent()) {
12576 const Expr *NotScalarExpr =
12577 (
X->isInstantiationDependent() ||
X->getType()->isScalarType())
12580 ErrorFound = NotAScalarType;
12586 }
else if (!AtomicBody->isInstantiationDependent()) {
12587 ErrorFound = NotAnAssignmentOp;
12588 ErrorLoc = AtomicBody->getExprLoc();
12589 ErrorRange = AtomicBody->getSourceRange();
12591 : AtomicBody->getExprLoc();
12593 : AtomicBody->getSourceRange();
12596 ErrorFound = NotAnExpression;
12598 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
12600 if (ErrorFound != NoError) {
12601 Diag(ErrorLoc, diag::err_omp_atomic_read_not_expression_statement)
12603 Diag(NoteLoc, diag::note_omp_atomic_read_write)
12604 << ErrorFound << NoteRange;
12609 }
else if (AtomicKind == OMPC_write) {
12616 } ErrorFound = NoError;
12621 if (
const auto *AtomicBody = dyn_cast<Expr>(Body)) {
12622 const auto *AtomicBinOp =
12623 dyn_cast<BinaryOperator>(AtomicBody->IgnoreParenImpCasts());
12624 if (AtomicBinOp && AtomicBinOp->
getOpcode() == BO_Assign) {
12626 E = AtomicBinOp->
getRHS();
12627 if ((
X->isInstantiationDependent() ||
X->getType()->isScalarType()) &&
12629 if (!
X->isLValue()) {
12630 ErrorFound = NotAnLValue;
12633 NoteLoc =
X->getExprLoc();
12634 NoteRange =
X->getSourceRange();
12636 }
else if (!
X->isInstantiationDependent() ||
12638 const Expr *NotScalarExpr =
12639 (
X->isInstantiationDependent() ||
X->getType()->isScalarType())
12642 ErrorFound = NotAScalarType;
12648 }
else if (!AtomicBody->isInstantiationDependent()) {
12649 ErrorFound = NotAnAssignmentOp;
12650 ErrorLoc = AtomicBody->getExprLoc();
12651 ErrorRange = AtomicBody->getSourceRange();
12653 : AtomicBody->getExprLoc();
12655 : AtomicBody->getSourceRange();
12658 ErrorFound = NotAnExpression;
12660 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
12662 if (ErrorFound != NoError) {
12663 Diag(ErrorLoc, diag::err_omp_atomic_write_not_expression_statement)
12665 Diag(NoteLoc, diag::note_omp_atomic_read_write)
12666 << ErrorFound << NoteRange;
12671 }
else if (AtomicKind == OMPC_update || AtomicKind == OMPC_unknown) {
12680 OpenMPAtomicUpdateChecker Checker(*
this);
12681 if (Checker.checkStatement(
12683 (AtomicKind == OMPC_update)
12684 ? diag::err_omp_atomic_update_not_expression_statement
12685 : diag::err_omp_atomic_not_expression_statement,
12686 diag::note_omp_atomic_update))
12689 E = Checker.getExpr();
12690 X = Checker.getX();
12691 UE = Checker.getUpdateExpr();
12692 IsXLHSInRHSPart = Checker.isXLHSInRHSPart();
12694 }
else if (AtomicKind == OMPC_capture) {
12697 NotACompoundStatement,
12698 NotTwoSubstatements,
12699 NotASpecificExpression,
12701 } ErrorFound = NoError;
12704 if (
const auto *AtomicBody = dyn_cast<Expr>(Body)) {
12713 const auto *AtomicBinOp =
12714 dyn_cast<BinaryOperator>(AtomicBody->IgnoreParenImpCasts());
12715 if (AtomicBinOp && AtomicBinOp->
getOpcode() == BO_Assign) {
12718 OpenMPAtomicUpdateChecker Checker(*
this);
12719 if (Checker.checkStatement(
12720 Body, diag::err_omp_atomic_capture_not_expression_statement,
12721 diag::note_omp_atomic_update))
12723 E = Checker.getExpr();
12724 X = Checker.getX();
12725 UE = Checker.getUpdateExpr();
12726 IsXLHSInRHSPart = Checker.isXLHSInRHSPart();
12727 IsPostfixUpdate = Checker.isPostfixUpdate();
12728 }
else if (!AtomicBody->isInstantiationDependent()) {
12729 ErrorLoc = AtomicBody->getExprLoc();
12730 ErrorRange = AtomicBody->getSourceRange();
12732 : AtomicBody->getExprLoc();
12734 : AtomicBody->getSourceRange();
12735 ErrorFound = NotAnAssignmentOp;
12737 if (ErrorFound != NoError) {
12738 Diag(ErrorLoc, diag::err_omp_atomic_capture_not_expression_statement)
12740 Diag(NoteLoc, diag::note_omp_atomic_capture) << ErrorFound << NoteRange;
12744 UE =
V = E =
X =
nullptr;
12762 if (
auto *CS = dyn_cast<CompoundStmt>(Body)) {
12764 if (CS->size() == 2) {
12766 Stmt *Second = CS->body_back();
12767 if (
auto *EWC = dyn_cast<ExprWithCleanups>(
First))
12768 First = EWC->getSubExpr()->IgnoreParenImpCasts();
12769 if (
auto *EWC = dyn_cast<ExprWithCleanups>(Second))
12770 Second = EWC->getSubExpr()->IgnoreParenImpCasts();
12772 OpenMPAtomicUpdateChecker Checker(*
this);
12773 bool IsUpdateExprFound = !Checker.checkStatement(Second);
12775 if (IsUpdateExprFound) {
12776 BinOp = dyn_cast<BinaryOperator>(
First);
12777 IsUpdateExprFound = BinOp && BinOp->
getOpcode() == BO_Assign;
12789 llvm::FoldingSetNodeID XId, PossibleXId;
12790 Checker.getX()->Profile(XId,
Context,
true);
12792 IsUpdateExprFound = XId == PossibleXId;
12793 if (IsUpdateExprFound) {
12795 X = Checker.getX();
12796 E = Checker.getExpr();
12797 UE = Checker.getUpdateExpr();
12798 IsXLHSInRHSPart = Checker.isXLHSInRHSPart();
12799 IsPostfixUpdate =
true;
12802 if (!IsUpdateExprFound) {
12803 IsUpdateExprFound = !Checker.checkStatement(
First);
12805 if (IsUpdateExprFound) {
12806 BinOp = dyn_cast<BinaryOperator>(Second);
12807 IsUpdateExprFound = BinOp && BinOp->
getOpcode() == BO_Assign;
12819 llvm::FoldingSetNodeID XId, PossibleXId;
12820 Checker.getX()->Profile(XId,
Context,
true);
12822 IsUpdateExprFound = XId == PossibleXId;
12823 if (IsUpdateExprFound) {
12825 X = Checker.getX();
12826 E = Checker.getExpr();
12827 UE = Checker.getUpdateExpr();
12828 IsXLHSInRHSPart = Checker.isXLHSInRHSPart();
12829 IsPostfixUpdate =
false;
12833 if (!IsUpdateExprFound) {
12835 auto *FirstExpr = dyn_cast<Expr>(
First);
12836 auto *SecondExpr = dyn_cast<Expr>(Second);
12837 if (!FirstExpr || !SecondExpr ||
12838 !(FirstExpr->isInstantiationDependent() ||
12839 SecondExpr->isInstantiationDependent())) {
12840 auto *FirstBinOp = dyn_cast<BinaryOperator>(
First);
12841 if (!FirstBinOp || FirstBinOp->getOpcode() != BO_Assign) {
12842 ErrorFound = NotAnAssignmentOp;
12843 NoteLoc = ErrorLoc = FirstBinOp ? FirstBinOp->getOperatorLoc()
12844 :
First->getBeginLoc();
12845 NoteRange = ErrorRange = FirstBinOp
12846 ? FirstBinOp->getSourceRange()
12849 auto *SecondBinOp = dyn_cast<BinaryOperator>(Second);
12850 if (!SecondBinOp || SecondBinOp->getOpcode() != BO_Assign) {
12851 ErrorFound = NotAnAssignmentOp;
12852 NoteLoc = ErrorLoc = SecondBinOp
12853 ? SecondBinOp->getOperatorLoc()
12855 NoteRange = ErrorRange =
12856 SecondBinOp ? SecondBinOp->getSourceRange()
12859 Expr *PossibleXRHSInFirst =
12861 Expr *PossibleXLHSInSecond =
12863 llvm::FoldingSetNodeID X1Id, X2Id;
12868 IsUpdateExprFound = X1Id == X2Id;
12869 if (IsUpdateExprFound) {
12870 V = FirstBinOp->getLHS();
12871 X = SecondBinOp->getLHS();
12872 E = SecondBinOp->getRHS();
12874 IsXLHSInRHSPart =
false;
12875 IsPostfixUpdate =
true;
12877 ErrorFound = NotASpecificExpression;
12878 ErrorLoc = FirstBinOp->getExprLoc();
12879 ErrorRange = FirstBinOp->getSourceRange();
12880 NoteLoc = SecondBinOp->getLHS()->getExprLoc();
12881 NoteRange = SecondBinOp->getRHS()->getSourceRange();
12889 NoteRange = ErrorRange =
12891 ErrorFound = NotTwoSubstatements;
12895 NoteRange = ErrorRange =
12897 ErrorFound = NotACompoundStatement;
12900 if (ErrorFound != NoError) {
12901 Diag(ErrorLoc, diag::err_omp_atomic_capture_not_compound_statement)
12903 Diag(NoteLoc, diag::note_omp_atomic_capture) << ErrorFound << NoteRange;
12907 UE =
V = E =
X =
nullptr;
12908 }
else if (AtomicKind == OMPC_compare) {
12909 if (IsCompareCapture) {
12910 OpenMPAtomicCompareCaptureChecker::ErrorInfoTy ErrorInfo;
12911 OpenMPAtomicCompareCaptureChecker Checker(*
this);
12912 if (!Checker.checkStmt(Body, ErrorInfo)) {
12913 Diag(ErrorInfo.ErrorLoc, diag::err_omp_atomic_compare_capture)
12914 << ErrorInfo.ErrorRange;
12915 Diag(ErrorInfo.NoteLoc, diag::note_omp_atomic_compare)
12916 << ErrorInfo.Error << ErrorInfo.NoteRange;
12919 X = Checker.getX();
12920 E = Checker.getE();
12921 D = Checker.getD();
12922 CE = Checker.getCond();
12923 V = Checker.getV();
12924 R = Checker.getR();
12926 IsXLHSInRHSPart = Checker.isXBinopExpr();
12927 IsFailOnly = Checker.isFailOnly();
12928 IsPostfixUpdate = Checker.isPostfixUpdate();
12930 OpenMPAtomicCompareChecker::ErrorInfoTy ErrorInfo;
12931 OpenMPAtomicCompareChecker Checker(*
this);
12932 if (!Checker.checkStmt(Body, ErrorInfo)) {
12933 Diag(ErrorInfo.ErrorLoc, diag::err_omp_atomic_compare)
12934 << ErrorInfo.ErrorRange;
12935 Diag(ErrorInfo.NoteLoc, diag::note_omp_atomic_compare)
12936 << ErrorInfo.Error << ErrorInfo.NoteRange;
12939 X = Checker.getX();
12940 E = Checker.getE();
12941 D = Checker.getD();
12942 CE = Checker.getCond();
12944 IsXLHSInRHSPart = Checker.isXBinopExpr();
12951 Context, StartLoc, EndLoc, Clauses, AStmt,
12952 {
X,
V, R, E, UE, D, CE, IsXLHSInRHSPart, IsPostfixUpdate, IsFailOnly});
12962 auto *CS = cast<CapturedStmt>(AStmt);
12970 ThisCaptureLevel > 1; --ThisCaptureLevel) {
12984 if (
DSAStack->hasInnerTeamsRegion()) {
12986 bool OMPTeamsFound =
true;
12987 if (
const auto *CS = dyn_cast<CompoundStmt>(S)) {
12988 auto I = CS->body_begin();
12989 while (I != CS->body_end()) {
12990 const auto *OED = dyn_cast<OMPExecutableDirective>(*I);
12994 OMPTeamsFound =
false;
12999 assert(I != CS->body_end() &&
"Not found statement");
13002 const auto *OED = dyn_cast<OMPExecutableDirective>(S);
13005 if (!OMPTeamsFound) {
13006 Diag(StartLoc, diag::err_omp_target_contains_not_only_teams);
13008 diag::note_omp_nested_teams_construct_here);
13009 Diag(S->getBeginLoc(), diag::note_omp_nested_statement_here)
13010 << isa<OMPExecutableDirective>(S);
13027 auto *CS = cast<CapturedStmt>(AStmt);
13035 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13048 Context, StartLoc, EndLoc, Clauses, AStmt,
13058 auto *CS = cast<CapturedStmt>(AStmt);
13066 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13079 unsigned NestedLoopCount =
13082 VarsWithImplicitDSA, B);
13083 if (NestedLoopCount == 0)
13087 "omp target parallel for loop exprs were not built");
13092 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13102 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13109 return llvm::any_of(
13110 Clauses, [K](
const OMPClause *C) {
return C->getClauseKind() == K; });
13113 template <
typename... Params>
13115 const Params... ClauseTypes) {
13122 if (
auto *TC = dyn_cast<OMPToClause>(C))
13123 return llvm::all_of(TC->all_decls(), [](
ValueDecl *VD) {
13124 return !VD || !VD->hasAttr<OMPDeclareTargetDeclAttr>() ||
13125 (VD->isExternallyVisible() &&
13126 VD->getVisibility() != HiddenVisibility);
13128 else if (
auto *FC = dyn_cast<OMPFromClause>(C))
13129 return llvm::all_of(FC->all_decls(), [](
ValueDecl *VD) {
13130 return !VD || !VD->hasAttr<OMPDeclareTargetDeclAttr>() ||
13131 (VD->isExternallyVisible() &&
13132 VD->getVisibility() != HiddenVisibility);
13146 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13151 if (!
hasClauses(Clauses, OMPC_map, OMPC_use_device_ptr) &&
13155 Expected =
"'map' or 'use_device_ptr'";
13157 Expected =
"'map', 'use_device_ptr', or 'use_device_addr'";
13158 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
13159 <<
Expected << getOpenMPDirectiveName(OMPD_target_data);
13176 auto *CS = cast<CapturedStmt>(AStmt);
13184 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13197 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
13198 <<
"'map'" << getOpenMPDirectiveName(OMPD_target_enter_data);
13213 auto *CS = cast<CapturedStmt>(AStmt);
13221 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13234 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
13235 <<
"'map'" << getOpenMPDirectiveName(OMPD_target_exit_data);
13250 auto *CS = cast<CapturedStmt>(AStmt);
13258 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13268 if (!
hasClauses(Clauses, OMPC_to, OMPC_from)) {
13269 Diag(StartLoc, diag::err_omp_at_least_one_motion_clause_required);
13274 Diag(StartLoc, diag::err_omp_cannot_update_with_internal_linkage);
13288 auto *CS = cast<CapturedStmt>(AStmt);
13298 DSAStack->setParentTeamsRegionLoc(StartLoc);
13307 if (
DSAStack->isParentNowaitRegion()) {
13308 Diag(StartLoc, diag::err_omp_parent_cancel_region_nowait) << 0;
13311 if (
DSAStack->isParentOrderedRegion()) {
13312 Diag(StartLoc, diag::err_omp_parent_cancel_region_ordered) << 0;
13323 if (
DSAStack->isParentNowaitRegion()) {
13324 Diag(StartLoc, diag::err_omp_parent_cancel_region_nowait) << 1;
13327 if (
DSAStack->isParentOrderedRegion()) {
13328 Diag(StartLoc, diag::err_omp_parent_cancel_region_ordered) << 1;
13331 DSAStack->setParentCancelRegion(
true);
13338 const OMPClause *ReductionClause =
nullptr;
13339 const OMPClause *NogroupClause =
nullptr;
13341 if (C->getClauseKind() == OMPC_reduction) {
13342 ReductionClause = C;
13347 if (C->getClauseKind() == OMPC_nogroup) {
13349 if (ReductionClause)
13354 if (ReductionClause && NogroupClause) {
13355 S.
Diag(ReductionClause->
getBeginLoc(), diag::err_omp_reduction_with_nogroup)
13369 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13373 unsigned NestedLoopCount =
13376 VarsWithImplicitDSA, B);
13377 if (NestedLoopCount == 0)
13381 "omp for loop exprs were not built");
13387 {OMPC_grainsize, OMPC_num_tasks}))
13397 NestedLoopCount, Clauses, AStmt, B,
13407 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13411 unsigned NestedLoopCount =
13414 VarsWithImplicitDSA, B);
13415 if (NestedLoopCount == 0)
13419 "omp for loop exprs were not built");
13424 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13436 {OMPC_grainsize, OMPC_num_tasks}))
13448 NestedLoopCount, Clauses, AStmt, B);
13457 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13461 unsigned NestedLoopCount =
13464 VarsWithImplicitDSA, B);
13465 if (NestedLoopCount == 0)
13469 "omp for loop exprs were not built");
13475 {OMPC_grainsize, OMPC_num_tasks}))
13485 NestedLoopCount, Clauses, AStmt, B,
13495 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13499 unsigned NestedLoopCount =
13502 VarsWithImplicitDSA, B);
13503 if (NestedLoopCount == 0)
13507 "omp for loop exprs were not built");
13513 {OMPC_grainsize, OMPC_num_tasks}))
13523 NestedLoopCount, Clauses, AStmt, B,
13533 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13537 unsigned NestedLoopCount =
13540 VarsWithImplicitDSA, B);
13541 if (NestedLoopCount == 0)
13545 "omp for loop exprs were not built");
13550 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13562 {OMPC_grainsize, OMPC_num_tasks}))
13574 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13583 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13587 unsigned NestedLoopCount =
13590 VarsWithImplicitDSA, B);
13591 if (NestedLoopCount == 0)
13595 "omp for loop exprs were not built");
13600 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13612 {OMPC_grainsize, OMPC_num_tasks}))
13624 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13633 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13634 auto *CS = cast<CapturedStmt>(AStmt);
13641 for (
int ThisCaptureLevel =
13643 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13659 VarsWithImplicitDSA, B);
13660 if (NestedLoopCount == 0)
13664 "omp for loop exprs were not built");
13670 {OMPC_grainsize, OMPC_num_tasks}))
13680 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13690 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13691 auto *CS = cast<CapturedStmt>(AStmt);
13698 for (
int ThisCaptureLevel =
13700 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13716 VarsWithImplicitDSA, B);
13717 if (NestedLoopCount == 0)
13721 "omp for loop exprs were not built");
13727 {OMPC_grainsize, OMPC_num_tasks}))
13737 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13747 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13748 auto *CS = cast<CapturedStmt>(AStmt);
13755 for (
int ThisCaptureLevel =
13757 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13773 VarsWithImplicitDSA, B);
13774 if (NestedLoopCount == 0)
13778 "omp for loop exprs were not built");
13783 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13795 {OMPC_grainsize, OMPC_num_tasks}))
13807 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13816 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13817 auto *CS = cast<CapturedStmt>(AStmt);
13824 for (
int ThisCaptureLevel =
13826 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13842 VarsWithImplicitDSA, B);
13843 if (NestedLoopCount == 0)
13847 "omp for loop exprs were not built");
13852 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
13864 {OMPC_grainsize, OMPC_num_tasks}))
13876 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13885 assert(isa<CapturedStmt>(AStmt) &&
"Captured statement expected");
13889 unsigned NestedLoopCount =
13892 *
this, *
DSAStack, VarsWithImplicitDSA, B);
13893 if (NestedLoopCount == 0)
13897 "omp for loop exprs were not built");
13901 NestedLoopCount, Clauses, AStmt, B);
13910 auto *CS = cast<CapturedStmt>(AStmt);
13917 for (
int ThisCaptureLevel =
13919 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13935 VarsWithImplicitDSA, B);
13936 if (NestedLoopCount == 0)
13940 "omp for loop exprs were not built");
13944 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13954 auto *CS = cast<CapturedStmt>(AStmt);
13961 for (
int ThisCaptureLevel =
13963 ThisCaptureLevel > 1; --ThisCaptureLevel) {
13979 VarsWithImplicitDSA, B);
13980 if (NestedLoopCount == 0)
13984 "omp for loop exprs were not built");
13989 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
14002 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14011 auto *CS = cast<CapturedStmt>(AStmt);
14019 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14032 unsigned NestedLoopCount =
14034 nullptr , CS, *
this,
14035 *
DSAStack, VarsWithImplicitDSA, B);
14036 if (NestedLoopCount == 0)
14040 "omp for loop exprs were not built");
14045 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
14058 NestedLoopCount, Clauses, AStmt, B);
14067 auto *CS = cast<CapturedStmt>(AStmt);
14075 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14092 if (NestedLoopCount == 0)
14096 "omp target parallel for simd loop exprs were not built");
14101 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
14113 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14122 auto *CS = cast<CapturedStmt>(AStmt);
14130 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14143 unsigned NestedLoopCount =
14146 VarsWithImplicitDSA, B);
14147 if (NestedLoopCount == 0)
14151 "omp target simd loop exprs were not built");
14156 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
14169 NestedLoopCount, Clauses, AStmt, B);
14178 auto *CS = cast<CapturedStmt>(AStmt);
14186 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14199 unsigned NestedLoopCount =
14201 nullptr , CS, *
this,
14202 *
DSAStack, VarsWithImplicitDSA, B);
14203 if (NestedLoopCount == 0)
14207 "omp teams distribute loop exprs were not built");
14211 DSAStack->setParentTeamsRegionLoc(StartLoc);
14214 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14223 auto *CS = cast<CapturedStmt>(AStmt);
14230 for (
int ThisCaptureLevel =
14232 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14248 VarsWithImplicitDSA, B);
14250 if (NestedLoopCount == 0)
14254 "omp teams distribute simd loop exprs were not built");
14259 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
14272 DSAStack->setParentTeamsRegionLoc(StartLoc);
14275 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14284 auto *CS = cast<CapturedStmt>(AStmt);
14292 for (
int ThisCaptureLevel =
14294 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14310 VarsWithImplicitDSA, B);
14312 if (NestedLoopCount == 0)
14316 "omp for loop exprs were not built");
14321 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
14334 DSAStack->setParentTeamsRegionLoc(StartLoc);
14337 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14346 auto *CS = cast<CapturedStmt>(AStmt);
14354 for (
int ThisCaptureLevel =
14356 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14372 VarsWithImplicitDSA, B);
14374 if (NestedLoopCount == 0)
14378 "omp for loop exprs were not built");
14382 DSAStack->setParentTeamsRegionLoc(StartLoc);
14385 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
14396 auto *CS = cast<CapturedStmt>(AStmt);
14405 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14426 auto *CS = cast<CapturedStmt>(AStmt);
14433 for (
int ThisCaptureLevel =
14435 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14451 VarsWithImplicitDSA, B);
14452 if (NestedLoopCount == 0)
14456 "omp target teams distribute loop exprs were not built");
14460 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14469 auto *CS = cast<CapturedStmt>(AStmt);
14476 for (
int ThisCaptureLevel =
14478 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14494 VarsWithImplicitDSA, B);
14495 if (NestedLoopCount == 0)
14499 "omp target teams distribute parallel for loop exprs were not built");
14504 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
14514 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
14524 auto *CS = cast<CapturedStmt>(AStmt);
14532 OMPD_target_teams_distribute_parallel_for_simd);
14533 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14546 unsigned NestedLoopCount =
14549 nullptr , CS, *
this,
14550 *
DSAStack, VarsWithImplicitDSA, B);
14551 if (NestedLoopCount == 0)
14555 "omp target teams distribute parallel for simd loop exprs were not "
14561 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
14574 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14583 auto *CS = cast<CapturedStmt>(AStmt);
14590 for (
int ThisCaptureLevel =
14592 ThisCaptureLevel > 1; --ThisCaptureLevel) {
14608 VarsWithImplicitDSA, B);
14609 if (NestedLoopCount == 0)
14613 "omp target teams distribute simd loop exprs were not built");
14618 if (
auto *LC = dyn_cast<OMPLinearClause>(C))
14631 Context, StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14634 bool Sema::checkTransformableLoopNest(
14640 OriginalInits.emplace_back();
14643 [
this, &LoopHelpers, &Body, &OriginalInits,
Kind](
unsigned Cnt,
14645 VarsWithInheritedDSAType TmpDSA;
14646 unsigned SingleNumLoops =
14647 checkOpenMPLoop(Kind, nullptr, nullptr, CurStmt, *this, *DSAStack,
14648 TmpDSA, LoopHelpers[Cnt]);
14649 if (SingleNumLoops == 0)
14651 assert(SingleNumLoops == 1 &&
"Expect single loop iteration space");
14652 if (auto *For = dyn_cast<ForStmt>(CurStmt)) {
14653 OriginalInits.back().push_back(For->getInit());
14654 Body = For->getBody();
14656 assert(isa<CXXForRangeStmt>(CurStmt) &&
14657 "Expected canonical for or range-based for loops.");
14658 auto *CXXFor = cast<CXXForRangeStmt>(CurStmt);
14659 OriginalInits.back().push_back(CXXFor->getBeginStmt());
14660 Body = CXXFor->getBody();
14662 OriginalInits.emplace_back();
14666 Stmt *DependentPreInits;
14667 if (
auto *Dir = dyn_cast<OMPTileDirective>(Transform))
14668 DependentPreInits = Dir->getPreInits();
14669 else if (
auto *Dir = dyn_cast<OMPUnrollDirective>(Transform))
14670 DependentPreInits = Dir->getPreInits();
14672 llvm_unreachable(
"Unhandled loop transformation");
14673 if (!DependentPreInits)
14675 llvm::append_range(OriginalInits.back(),
14676 cast<DeclStmt>(DependentPreInits)->getDeclGroup());
14678 assert(OriginalInits.back().empty() &&
"No preinit after innermost loop");
14679 OriginalInits.pop_back();
14686 auto SizesClauses =
14687 OMPExecutableDirective::getClausesOfKind<OMPSizesClause>(Clauses);
14688 if (SizesClauses.empty()) {
14701 Stmt *Body =
nullptr;
14704 if (!checkTransformableLoopNest(OMPD_tile, AStmt, NumLoops, LoopHelpers, Body,
14711 NumLoops, AStmt,
nullptr,
nullptr);
14718 FloorIndVars.resize(NumLoops);
14719 TileIndVars.resize(NumLoops);
14720 for (
unsigned I = 0; I < NumLoops; ++I) {
14723 assert(LoopHelper.
Counters.size() == 1 &&
14724 "Expect single-dimensional loop iteration space");
14725 auto *OrigCntVar = cast<DeclRefExpr>(LoopHelper.
Counters.front());
14726 std::string OrigVarName = OrigCntVar->getNameInfo().getAsString();
14733 (Twine(
".floor_") + llvm::utostr(I) +
".iv." + OrigVarName).str();
14735 buildVarDecl(*
this, {}, CntTy, FloorCntName,
nullptr, OrigCntVar);
14736 FloorIndVars[I] = FloorCntDecl;
14742 (Twine(
".tile_") + llvm::utostr(I) +
".iv." + OrigVarName).str();
14747 auto *TileCntDecl = cast<VarDecl>(IterVarRef->
getDecl());
14749 TileIndVars[I] = TileCntDecl;
14751 for (
auto &
P : OriginalInits[I]) {
14752 if (
auto *D =
P.dyn_cast<
Decl *>())
14753 PreInits.push_back(D);
14754 else if (
auto *PI = dyn_cast_or_null<DeclStmt>(
P.dyn_cast<
Stmt *>()))
14755 PreInits.append(PI->decl_begin(), PI->decl_end());
14757 if (
auto *PI = cast_or_null<DeclStmt>(LoopHelper.
PreInits))
14758 PreInits.append(PI->decl_begin(), PI->decl_end());
14761 auto *CounterDecl = cast<DeclRefExpr>(CounterRef)->getDecl();
14762 if (isa<OMPCapturedExprDecl>(CounterDecl))
14763 PreInits.push_back(CounterDecl);
14768 Stmt *Inner = Body;
14771 for (
int I = NumLoops - 1; I >= 0; --I) {
14774 auto *OrigCntVar = cast<DeclRefExpr>(LoopHelper.
Counters[0]);
14775 QualType CntTy = OrigCntVar->getType();
14781 OrigCntVar->getExprLoc());
14783 OrigCntVar->getExprLoc());
14788 Decl *CounterDecl = TileIndVars[I];
14791 OrigCntVar->getBeginLoc(), OrigCntVar->getEndLoc());
14798 BO_Add, FloorIV, DimTileSize);
14803 NumIterations, EndOfTile.
get());
14808 IsPartialTile.
get(), NumIterations, EndOfTile.
get());
14809 if (!MinTileAndIterSpace.
isUsable())
14812 BO_LT, TileIV, MinTileAndIterSpace.
get());
14836 BodyParts.append(LoopHelper.
Updates.begin(), LoopHelper.
Updates.end());
14837 BodyParts.push_back(Inner);
14839 Inner->getBeginLoc(), Inner->getEndLoc());
14847 for (
int I = NumLoops - 1; I >= 0; --I) {
14848 auto &LoopHelper = LoopHelpers[I];
14849 Expr *NumIterations = LoopHelper.NumIterations;
14850 DeclRefExpr *OrigCntVar = cast<DeclRefExpr>(LoopHelper.Counters[0]);
14864 Decl *CounterDecl = FloorIndVars[I];
14873 BO_LT, FloorIV, NumIterations);
14879 BO_AddAssign, FloorIV, DimTileSize);
14885 IncrStmt.
get(), Inner, LoopHelper.Init->getBeginLoc(),
14886 LoopHelper.Init->getBeginLoc(), LoopHelper.Inc->getEndLoc());
14906 OMPExecutableDirective::getSingleClause<OMPFullClause>(Clauses);
14908 OMPExecutableDirective::getSingleClause<OMPPartialClause>(Clauses);
14909 assert(!(FullClause && PartialClause) &&
14910 "mutual exclusivity must have been checked before");
14912 constexpr
unsigned NumLoops = 1;
14913 Stmt *Body =
nullptr;
14918 if (!checkTransformableLoopNest(OMPD_unroll, AStmt, NumLoops, LoopHelpers,
14919 Body, OriginalInits))
14922 unsigned NumGeneratedLoops = PartialClause ? 1 : 0;
14927 NumGeneratedLoops,
nullptr,
nullptr);
14932 if (!VerifyPositiveIntegerConstantInClause(
14936 Diag(AStmt->
getBeginLoc(), diag::err_omp_unroll_full_variable_trip_count);
14938 <<
"#pragma omp unroll full";
14946 if (NumGeneratedLoops == 0)
14948 NumGeneratedLoops,
nullptr,
nullptr);
14994 assert(OriginalInits.size() == 1 &&
14995 "Expecting a single-dimensional loop iteration space");
14996 for (
auto &
P : OriginalInits[0]) {
14997 if (
auto *D =
P.dyn_cast<
Decl *>())
14998 PreInits.push_back(D);
14999 else if (
auto *PI = dyn_cast_or_null<DeclStmt>(
P.dyn_cast<
Stmt *>()))
15000 PreInits.append(PI->decl_begin(), PI->decl_end());
15002 if (
auto *PI = cast_or_null<DeclStmt>(LoopHelper.
PreInits))
15003 PreInits.append(PI->decl_begin(), PI->decl_end());
15006 auto *CounterDecl = cast<DeclRefExpr>(CounterRef)->getDecl();
15007 if (isa<OMPCapturedExprDecl>(CounterDecl))
15008 PreInits.push_back(CounterDecl);
15011 auto *IterationVarRef = cast<DeclRefExpr>(LoopHelper.
IterationVarRef);
15012 QualType IVTy = IterationVarRef->getType();
15013 assert(LoopHelper.
Counters.size() == 1 &&
15014 "Expecting a single-dimensional loop iteration space");
15015 auto *OrigVar = cast<DeclRefExpr>(LoopHelper.
Counters.front());
15021 Factor = FactorVal->getIntegerConstantExpr(
Context)->getZExtValue();
15022 FactorLoc = FactorVal->getExprLoc();
15027 assert(Factor > 0 &&
"Expected positive unroll factor");
15028 auto MakeFactorExpr = [
this, Factor, IVTy, FactorLoc]() {
15040 std::string OrigVarName = OrigVar->getNameInfo().getAsString();
15041 std::string OuterIVName = (Twine(
".unrolled.iv.") + OrigVarName).str();
15042 std::string InnerIVName = (Twine(
".unroll_inner.iv.") + OrigVarName).str();
15044 (Twine(
".unroll_inner.tripcount.") + OrigVarName).str();
15048 buildVarDecl(*
this, {}, IVTy, OuterIVName,
nullptr, OrigVar);
15049 auto MakeOuterRef = [
this, OuterIVDecl, IVTy, OrigVarLoc]() {
15055 auto *InnerIVDecl = cast<VarDecl>(IterationVarRef->getDecl());
15057 auto MakeInnerRef = [
this, InnerIVDecl, IVTy, OrigVarLoc]() {
15063 CaptureVars CopyTransformer(*
this);
15064 auto MakeNumIterations = [&CopyTransformer, &LoopHelper]() ->
Expr * {
15085 BO_Add, MakeOuterRef(), MakeFactorExpr());
15089 BO_LT, MakeInnerRef(), EndOfTile.
get());
15094 MakeNumIterations());
15099 InnerCond1.
get(), InnerCond2.
get());
15105 UO_PreInc, MakeInnerRef());
15111 InnerBodyStmts.append(LoopHelper.
Updates.begin(), LoopHelper.
Updates.end());
15112 InnerBodyStmts.push_back(Body);
15133 LoopHintAttr *UnrollHintAttr =
15134 LoopHintAttr::CreateImplicit(
Context, LoopHintAttr::UnrollCount,
15135 LoopHintAttr::Numeric, MakeFactorExpr());
15151 MakeNumIterations());
15158 MakeOuterRef(), MakeFactorExpr());
15169 NumGeneratedLoops, OuterFor,
15182 case OMPC_num_threads:
15191 case OMPC_allocator:
15194 case OMPC_collapse:
15200 case OMPC_num_teams:
15203 case OMPC_thread_limit:
15206 case OMPC_priority:
15218 case OMPC_novariants:
15221 case OMPC_nocontext:
15236 case OMPC_ompx_dyn_cgroup_mem:
15239 case OMPC_grainsize:
15240 case OMPC_num_tasks:
15244 case OMPC_proc_bind:
15245 case OMPC_schedule:
15247 case OMPC_firstprivate:
15248 case OMPC_lastprivate:
15250 case OMPC_reduction:
15251 case OMPC_task_reduction:
15252 case OMPC_in_reduction:
15256 case OMPC_copyprivate:
15259 case OMPC_mergeable:
15260 case OMPC_threadprivate:
15262 case OMPC_allocate:
15279 case OMPC_dist_schedule:
15280 case OMPC_defaultmap:
15285 case OMPC_use_device_ptr:
15286 case OMPC_use_device_addr:
15287 case OMPC_is_device_ptr:
15288 case OMPC_unified_address:
15289 case OMPC_unified_shared_memory:
15290 case OMPC_reverse_offload:
15291 case OMPC_dynamic_allocators:
15292 case OMPC_atomic_default_mem_order:
15293 case OMPC_device_type:
15295 case OMPC_nontemporal:
15298 case OMPC_severity:
15300 case OMPC_inclusive:
15301 case OMPC_exclusive:
15302 case OMPC_uses_allocators:
15303 case OMPC_affinity:
15307 llvm_unreachable(
"Clause is not allowed.");
15324 case OMPD_target_parallel_for_simd:
15325 if (OpenMPVersion >= 50 &&
15326 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)) {
15327 CaptureRegion = OMPD_parallel;
15331 case OMPD_target_parallel:
15332 case OMPD_target_parallel_for:
15333 case OMPD_target_parallel_loop:
15336 if (NameModifier == OMPD_unknown || NameModifier == OMPD_parallel)
15337 CaptureRegion = OMPD_target;
15339 case OMPD_target_teams_distribute_parallel_for_simd:
15340 if (OpenMPVersion >= 50 &&
15341 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)) {
15342 CaptureRegion = OMPD_parallel;
15346 case OMPD_target_teams_distribute_parallel_for:
15349 if (NameModifier == OMPD_unknown || NameModifier == OMPD_parallel)
15350 CaptureRegion = OMPD_teams;
15352 case OMPD_teams_distribute_parallel_for_simd:
15353 if (OpenMPVersion >= 50 &&
15354 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)) {
15355 CaptureRegion = OMPD_parallel;
15359 case OMPD_teams_distribute_parallel_for:
15360 CaptureRegion = OMPD_teams;
15362 case OMPD_target_update:
15363 case OMPD_target_enter_data:
15364 case OMPD_target_exit_data:
15365 CaptureRegion = OMPD_task;
15367 case OMPD_parallel_masked_taskloop:
15368 if (NameModifier == OMPD_unknown || NameModifier == OMPD_taskloop)
15369 CaptureRegion = OMPD_parallel;
15371 case OMPD_parallel_master_taskloop:
15372 if (NameModifier == OMPD_unknown || NameModifier == OMPD_taskloop)
15373 CaptureRegion = OMPD_parallel;
15375 case OMPD_parallel_masked_taskloop_simd:
15376 if ((OpenMPVersion <= 45 && NameModifier == OMPD_unknown) ||
15377 NameModifier == OMPD_taskloop) {
15378 CaptureRegion = OMPD_parallel;
15381 if (OpenMPVersion <= 45)
15383 if (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)
15384 CaptureRegion = OMPD_taskloop;
15386 case OMPD_parallel_master_taskloop_simd:
15387 if ((OpenMPVersion <= 45 && NameModifier == OMPD_unknown) ||
15388 NameModifier == OMPD_taskloop) {
15389 CaptureRegion = OMPD_parallel;
15392 if (OpenMPVersion <= 45)
15394 if (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)
15395 CaptureRegion = OMPD_taskloop;
15397 case OMPD_parallel_for_simd:
15398 if (OpenMPVersion <= 45)
15400 if (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)
15401 CaptureRegion = OMPD_parallel;
15403 case OMPD_taskloop_simd:
15404 case OMPD_master_taskloop_simd:
15405 case OMPD_masked_taskloop_simd:
15406 if (OpenMPVersion <= 45)
15408 if (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)
15409 CaptureRegion = OMPD_taskloop;
15411 case OMPD_distribute_parallel_for_simd:
15412 if (OpenMPVersion <= 45)
15414 if (NameModifier == OMPD_unknown || NameModifier == OMPD_simd)
15415 CaptureRegion = OMPD_parallel;
15417 case OMPD_target_simd:
15418 if (OpenMPVersion >= 50 &&
15419 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd))
15420 CaptureRegion = OMPD_target;
15422 case OMPD_teams_distribute_simd:
15423 case OMPD_target_teams_distribute_simd:
15424 if (OpenMPVersion >= 50 &&
15425 (NameModifier == OMPD_unknown || NameModifier == OMPD_simd))
15426 CaptureRegion = OMPD_teams;
15429 case OMPD_parallel:
15430 case OMPD_parallel_master:
15431 case OMPD_parallel_masked:
15432 case OMPD_parallel_sections:
15433 case OMPD_parallel_for:
15434 case OMPD_parallel_loop:
15436 case OMPD_target_teams:
15437 case OMPD_target_teams_distribute:
15438 case OMPD_target_teams_loop:
15439 case OMPD_distribute_parallel_for:
15441 case OMPD_taskloop:
15442 case OMPD_master_taskloop:
15443 case OMPD_masked_taskloop:
15444 case OMPD_target_data:
15446 case OMPD_for_simd:
15447 case OMPD_distribute_simd:
15450 case OMPD_threadprivate:
15451 case OMPD_allocate:
15452 case OMPD_taskyield:
15455 case OMPD_taskwait:
15456 case OMPD_cancellation_point:
15460 case OMPD_declare_reduction:
15461 case OMPD_declare_mapper:
15462 case OMPD_declare_simd:
15463 case OMPD_declare_variant:
15464 case OMPD_begin_declare_variant:
15465 case OMPD_end_declare_variant:
15466 case OMPD_declare_target:
15467 case OMPD_end_declare_target:
15469 case OMPD_teams_loop:
15474 case OMPD_sections:
15479 case OMPD_critical:
15480 case OMPD_taskgroup:
15481 case OMPD_distribute:
15484 case OMPD_teams_distribute:
15485 case OMPD_requires:
15486 case OMPD_metadirective:
15487 llvm_unreachable(
"Unexpected OpenMP directive with if-clause");
15490 llvm_unreachable(
"Unknown OpenMP directive");
15493 case OMPC_num_threads:
15495 case OMPD_target_parallel:
15496 case OMPD_target_parallel_for:
15497 case OMPD_target_parallel_for_simd:
15498 case OMPD_target_parallel_loop:
15499 CaptureRegion = OMPD_target;
15501 case OMPD_teams_distribute_parallel_for:
15502 case OMPD_teams_distribute_parallel_for_simd:
15503 case OMPD_target_teams_distribute_parallel_for:
15504 case OMPD_target_teams_distribute_parallel_for_simd:
15505 CaptureRegion = OMPD_teams;
15507 case OMPD_parallel:
15508 case OMPD_parallel_master:
15509 case OMPD_parallel_masked:
15510 case OMPD_parallel_sections:
15511 case OMPD_parallel_for:
15512 case OMPD_parallel_for_simd:
15513 case OMPD_parallel_loop:
15514 case OMPD_distribute_parallel_for:
15515 case OMPD_distribute_parallel_for_simd:
15516 case OMPD_parallel_master_taskloop:
15517 case OMPD_parallel_masked_taskloop:
15518 case OMPD_parallel_master_taskloop_simd:
15519 case OMPD_parallel_masked_taskloop_simd:
15522 case OMPD_target_data:
15523 case OMPD_target_enter_data:
15524 case OMPD_target_exit_data:
15525 case OMPD_target_update:
15527 case OMPD_target_simd:
15528 case OMPD_target_teams:
15529 case OMPD_target_teams_distribute:
15530 case OMPD_target_teams_distribute_simd:
15533 case OMPD_taskloop:
15534 case OMPD_taskloop_simd:
15535 case OMPD_master_taskloop:
15536 case OMPD_masked_taskloop:
15537 case OMPD_master_taskloop_simd:
15538 case OMPD_masked_taskloop_simd:
15539 case OMPD_threadprivate:
15540 case OMPD_allocate:
15541 case OMPD_taskyield:
15544 case OMPD_taskwait:
15545 case OMPD_cancellation_point:
15549 case OMPD_declare_reduction:
15550 case OMPD_declare_mapper:
15551 case OMPD_declare_simd:
15552 case OMPD_declare_variant:
15553 case OMPD_begin_declare_variant:
15554 case OMPD_end_declare_variant:
15555 case OMPD_declare_target:
15556 case OMPD_end_declare_target:
15558 case OMPD_teams_loop:
15559 case OMPD_target_teams_loop:
15565 case OMPD_for_simd:
15566 case OMPD_sections:
15571 case OMPD_critical:
15572 case OMPD_taskgroup:
15573 case OMPD_distribute:
15576 case OMPD_distribute_simd:
15577 case OMPD_teams_distribute:
15578 case OMPD_teams_distribute_simd:
15579 case OMPD_requires:
15580 case OMPD_metadirective:
15581 llvm_unreachable(
"Unexpected OpenMP directive with num_threads-clause");
15584 llvm_unreachable(
"Unknown OpenMP directive");
15587 case OMPC_num_teams:
15589 case OMPD_target_teams:
15590 case OMPD_target_teams_distribute:
15591 case OMPD_target_teams_distribute_simd:
15592 case OMPD_target_teams_distribute_parallel_for:
15593 case OMPD_target_teams_distribute_parallel_for_simd:
15594 case OMPD_target_teams_loop:
15595 CaptureRegion = OMPD_target;
15597 case OMPD_teams_distribute_parallel_for:
15598 case OMPD_teams_distribute_parallel_for_simd:
15600 case OMPD_teams_distribute:
15601 case OMPD_teams_distribute_simd:
15602 case OMPD_teams_loop:
15605 case OMPD_distribute_parallel_for:
15606 case OMPD_distribute_parallel_for_simd:
15608 case OMPD_taskloop:
15609 case OMPD_taskloop_simd:
15610 case OMPD_master_taskloop:
15611 case OMPD_masked_taskloop:
15612 case OMPD_master_taskloop_simd:
15613 case OMPD_masked_taskloop_simd:
15614 case OMPD_parallel_master_taskloop:
15615 case OMPD_parallel_masked_taskloop:
15616 case OMPD_parallel_master_taskloop_simd:
15617 case OMPD_parallel_masked_taskloop_simd:
15618 case OMPD_target_data:
15619 case OMPD_target_enter_data:
15620 case OMPD_target_exit_data:
15621 case OMPD_target_update:
15623 case OMPD_parallel:
15624 case OMPD_parallel_master:
15625 case OMPD_parallel_masked:
15626 case OMPD_parallel_sections:
15627 case OMPD_parallel_for:
15628 case OMPD_parallel_for_simd:
15629 case OMPD_parallel_loop:
15631 case OMPD_target_simd:
15632 case OMPD_target_parallel:
15633 case OMPD_target_parallel_for:
15634 case OMPD_target_parallel_for_simd:
15635 case OMPD_target_parallel_loop:
15636 case OMPD_threadprivate:
15637 case OMPD_allocate:
15638 case OMPD_taskyield:
15641 case OMPD_taskwait:
15642 case OMPD_cancellation_point:
15646 case OMPD_declare_reduction:
15647 case OMPD_declare_mapper:
15648 case OMPD_declare_simd:
15649 case OMPD_declare_variant:
15650 case OMPD_begin_declare_variant:
15651 case OMPD_end_declare_variant:
15652 case OMPD_declare_target:
15653 case OMPD_end_declare_target:
15659 case OMPD_for_simd:
15660 case OMPD_sections:
15665 case OMPD_critical:
15666 case OMPD_taskgroup:
15667 case OMPD_distribute:
15670 case OMPD_distribute_simd:
15671 case OMPD_requires:
15672 case OMPD_metadirective:
15673 llvm_unreachable(
"Unexpected OpenMP directive with num_teams-clause");
15676 llvm_unreachable(
"Unknown OpenMP directive");
15679 case OMPC_thread_limit:
15682 case OMPD_target_teams:
15683 case OMPD_target_teams_distribute:
15684 case OMPD_target_teams_distribute_simd:
15685 case OMPD_target_teams_distribute_parallel_for:
15686 case OMPD_target_teams_distribute_parallel_for_simd:
15687 case OMPD_target_teams_loop:
15688 CaptureRegion = OMPD_target;
15690 case OMPD_teams_distribute_parallel_for:
15691 case OMPD_teams_distribute_parallel_for_simd:
15693 case OMPD_teams_distribute:
15694 case OMPD_teams_distribute_simd:
15695 case OMPD_teams_loop:
15698 case OMPD_distribute_parallel_for:
15699 case OMPD_distribute_parallel_for_simd:
15701 case OMPD_taskloop:
15702 case OMPD_taskloop_simd:
15703 case OMPD_master_taskloop:
15704 case OMPD_masked_taskloop:
15705 case OMPD_master_taskloop_simd:
15706 case OMPD_masked_taskloop_simd:
15707 case OMPD_parallel_master_taskloop:
15708 case OMPD_parallel_masked_taskloop:
15709 case OMPD_parallel_master_taskloop_simd:
15710 case OMPD_parallel_masked_taskloop_simd:
15711 case OMPD_target_data:
15712 case OMPD_target_enter_data:
15713 case OMPD_target_exit_data:
15714 case OMPD_target_update:
15716 case OMPD_parallel:
15717 case OMPD_parallel_master:
15718 case OMPD_parallel_masked:
15719 case OMPD_parallel_sections:
15720 case OMPD_parallel_for:
15721 case OMPD_parallel_for_simd:
15722 case OMPD_parallel_loop:
15723 case OMPD_target_simd:
15724 case OMPD_target_parallel:
15725 case OMPD_target_parallel_for:
15726 case OMPD_target_parallel_for_simd:
15727 case OMPD_target_parallel_loop:
15728 case OMPD_threadprivate:
15729 case OMPD_allocate:
15730 case OMPD_taskyield:
15733 case OMPD_taskwait:
15734 case OMPD_cancellation_point:
15738 case OMPD_declare_reduction:
15739 case OMPD_declare_mapper:
15740 case OMPD_declare_simd:
15741 case OMPD_declare_variant:
15742 case OMPD_begin_declare_variant:
15743 case OMPD_end_declare_variant:
15744 case OMPD_declare_target:
15745 case OMPD_end_declare_target:
15751 case OMPD_for_simd:
15752 case OMPD_sections:
15757 case OMPD_critical:
15758 case OMPD_taskgroup:
15759 case OMPD_distribute:
15762 case OMPD_distribute_simd:
15763 case OMPD_requires:
15764 case OMPD_metadirective:
15765 llvm_unreachable(
"Unexpected OpenMP directive with thread_limit-clause");
15768 llvm_unreachable(
"Unknown OpenMP directive");
15771 case OMPC_schedule:
15773 case OMPD_parallel_for:
15774 case OMPD_parallel_for_simd:
15775 case OMPD_distribute_parallel_for:
15776 case OMPD_distribute_parallel_for_simd:
15777 case OMPD_teams_distribute_parallel_for:
15778 case OMPD_teams_distribute_parallel_for_simd:
15779 case OMPD_target_parallel_for:
15780 case OMPD_target_parallel_for_simd:
15781 case OMPD_target_teams_distribute_parallel_for:
15782 case OMPD_target_teams_distribute_parallel_for_simd:
15783 CaptureRegion = OMPD_parallel;
15786 case OMPD_for_simd:
15790 case OMPD_taskloop:
15791 case OMPD_taskloop_simd:
15792 case OMPD_master_taskloop:
15793 case OMPD_masked_taskloop:
15794 case OMPD_master_taskloop_simd:
15795 case OMPD_masked_taskloop_simd:
15796 case OMPD_parallel_master_taskloop:
15797 case OMPD_parallel_masked_taskloop:
15798 case OMPD_parallel_master_taskloop_simd:
15799 case OMPD_parallel_masked_taskloop_simd:
15800 case OMPD_target_data:
15801 case OMPD_target_enter_data:
15802 case OMPD_target_exit_data:
15803 case OMPD_target_update:
15805 case OMPD_teams_distribute:
15806 case OMPD_teams_distribute_simd:
15807 case OMPD_target_teams_distribute:
15808 case OMPD_target_teams_distribute_simd:
15810 case OMPD_target_simd:
15811 case OMPD_target_parallel:
15813 case OMPD_parallel:
15814 case OMPD_parallel_master:
15815 case OMPD_parallel_masked:
15816 case OMPD_parallel_sections:
15817 case OMPD_threadprivate:
15818 case OMPD_allocate:
15819 case OMPD_taskyield:
15822 case OMPD_taskwait:
15823 case OMPD_cancellation_point:
15827 case OMPD_declare_reduction:
15828 case OMPD_declare_mapper:
15829 case OMPD_declare_simd:
15830 case OMPD_declare_variant:
15831 case OMPD_begin_declare_variant:
15832 case OMPD_end_declare_variant:
15833 case OMPD_declare_target:
15834 case OMPD_end_declare_target:
15836 case OMPD_teams_loop:
15837 case OMPD_target_teams_loop:
15838 case OMPD_parallel_loop:
15839 case OMPD_target_parallel_loop:
15843 case OMPD_sections:
15848 case OMPD_critical:
15849 case OMPD_taskgroup:
15850 case OMPD_distribute:
15853 case OMPD_distribute_simd:
15854 case OMPD_target_teams:
15855 case OMPD_requires:
15856 case OMPD_metadirective:
15857 llvm_unreachable(
"Unexpected OpenMP directive with schedule clause");
15860 llvm_unreachable(
"Unknown OpenMP directive");
15863 case OMPC_dist_schedule:
15865 case OMPD_teams_distribute_parallel_for:
15866 case OMPD_teams_distribute_parallel_for_simd:
15867 case OMPD_teams_distribute:
15868 case OMPD_teams_distribute_simd:
15869 case OMPD_target_teams_distribute_parallel_for:
15870 case OMPD_target_teams_distribute_parallel_for_simd:
15871 case OMPD_target_teams_distribute:
15872 case OMPD_target_teams_distribute_simd:
15873 CaptureRegion = OMPD_teams;
15875 case OMPD_distribute_parallel_for:
15876 case OMPD_distribute_parallel_for_simd:
15877 case OMPD_distribute:
15878 case OMPD_distribute_simd:
15881 case OMPD_parallel_for:
15882 case OMPD_parallel_for_simd:
15883 case OMPD_target_parallel_for_simd:
15884 case OMPD_target_parallel_for:
15886 case OMPD_taskloop:
15887 case OMPD_taskloop_simd:
15888 case OMPD_master_taskloop:
15889 case OMPD_masked_taskloop:
15890 case OMPD_master_taskloop_simd:
15891 case OMPD_masked_taskloop_simd:
15892 case OMPD_parallel_master_taskloop:
15893 case OMPD_parallel_masked_taskloop:
15894 case OMPD_parallel_master_taskloop_simd:
15895 case OMPD_parallel_masked_taskloop_simd:
15896 case OMPD_target_data:
15897 case OMPD_target_enter_data:
15898 case OMPD_target_exit_data:
15899 case OMPD_target_update:
15902 case OMPD_target_simd:
15903 case OMPD_target_parallel:
15905 case OMPD_parallel:
15906 case OMPD_parallel_master:
15907 case OMPD_parallel_masked:
15908 case OMPD_parallel_sections:
15909 case OMPD_threadprivate:
15910 case OMPD_allocate:
15911 case OMPD_taskyield:
15914 case OMPD_taskwait:
15915 case OMPD_cancellation_point:
15919 case OMPD_declare_reduction:
15920 case OMPD_declare_mapper:
15921 case OMPD_declare_simd:
15922 case OMPD_declare_variant:
15923 case OMPD_begin_declare_variant:
15924 case OMPD_end_declare_variant:
15925 case OMPD_declare_target:
15926 case OMPD_end_declare_target:
15928 case OMPD_teams_loop:
15929 case OMPD_target_teams_loop:
15930 case OMPD_parallel_loop:
15931 case OMPD_target_parallel_loop:
15936 case OMPD_for_simd:
15937 case OMPD_sections:
15942 case OMPD_critical:
15943 case OMPD_taskgroup:
15946 case OMPD_target_teams:
15947 case OMPD_requires:
15948 case OMPD_metadirective:
15949 llvm_unreachable(
"Unexpected OpenMP directive with dist_schedule clause");
15952 llvm_unreachable(
"Unknown OpenMP directive");
15955 case OMPC_ompx_dyn_cgroup_mem:
15958 case OMPD_target_simd:
15959 case OMPD_target_teams:
15960 case OMPD_target_parallel:
15961 case OMPD_target_teams_distribute:
15962 case OMPD_target_teams_distribute_simd:
15963 case OMPD_target_parallel_for:
15964 case OMPD_target_parallel_for_simd:
15965 case OMPD_target_parallel_loop:
15966 case OMPD_target_teams_distribute_parallel_for:
15967 case OMPD_target_teams_distribute_parallel_for_simd:
15968 case OMPD_target_teams_loop:
15969 CaptureRegion = OMPD_target;
15972 llvm_unreachable(
"Unknown OpenMP directive");
15977 case OMPD_target_update:
15978 case OMPD_target_enter_data:
15979 case OMPD_target_exit_data:
15981 case OMPD_target_simd:
15982 case OMPD_target_teams:
15983 case OMPD_target_parallel:
15984 case OMPD_target_teams_distribute:
15985 case OMPD_target_teams_distribute_simd:
15986 case OMPD_target_parallel_for:
15987 case OMPD_target_parallel_for_simd:
15988 case OMPD_target_parallel_loop:
15989 case OMPD_target_teams_distribute_parallel_for:
15990 case OMPD_target_teams_distribute_parallel_for_simd:
15991 case OMPD_target_teams_loop:
15992 case OMPD_dispatch:
15993 CaptureRegion = OMPD_task;
15995 case OMPD_target_data:
15999 case OMPD_teams_distribute_parallel_for:
16000 case OMPD_teams_distribute_parallel_for_simd:
16002 case OMPD_teams_distribute:
16003 case OMPD_teams_distribute_simd:
16004 case OMPD_distribute_parallel_for:
16005 case OMPD_distribute_parallel_for_simd:
16007 case OMPD_taskloop:
16008 case OMPD_taskloop_simd:
16009 case OMPD_master_taskloop:
16010 case OMPD_masked_taskloop:
16011 case OMPD_master_taskloop_simd:
16012 case OMPD_masked_taskloop_simd:
16013 case OMPD_parallel_master_taskloop:
16014 case OMPD_parallel_masked_taskloop:
16015 case OMPD_parallel_master_taskloop_simd:
16016 case OMPD_parallel_masked_taskloop_simd:
16018 case OMPD_parallel:
16019 case OMPD_parallel_master:
16020 case OMPD_parallel_masked:
16021 case OMPD_parallel_sections:
16022 case OMPD_parallel_for:
16023 case OMPD_parallel_for_simd:
16024 case OMPD_threadprivate:
16025 case OMPD_allocate:
16026 case OMPD_taskyield:
16029 case OMPD_taskwait:
16030 case OMPD_cancellation_point:
16034 case OMPD_declare_reduction:
16035 case OMPD_declare_mapper:
16036 case OMPD_declare_simd:
16037 case OMPD_declare_variant:
16038 case OMPD_begin_declare_variant:
16039 case OMPD_end_declare_variant:
16040 case OMPD_declare_target:
16041 case OMPD_end_declare_target:
16043 case OMPD_teams_loop:
16044 case OMPD_parallel_loop:
16049 case OMPD_for_simd:
16050 case OMPD_sections:
16055 case OMPD_critical:
16056 case OMPD_taskgroup:
16057 case OMPD_distribute:
16060 case OMPD_distribute_simd:
16061 case OMPD_requires:
16062 case OMPD_metadirective:
16063 llvm_unreachable(
"Unexpected OpenMP directive with device-clause");
16066 llvm_unreachable(
"Unknown OpenMP directive");
16069 case OMPC_grainsize:
16070 case OMPC_num_tasks:
16072 case OMPC_priority:
16075 case OMPD_taskloop:
16076 case OMPD_taskloop_simd:
16077 case OMPD_master_taskloop:
16078 case OMPD_masked_taskloop:
16079 case OMPD_master_taskloop_simd:
16080 case OMPD_masked_taskloop_simd:
16082 case OMPD_parallel_masked_taskloop:
16083 case OMPD_parallel_masked_taskloop_simd:
16084 case OMPD_parallel_master_taskloop:
16085 case OMPD_parallel_master_taskloop_simd:
16086 CaptureRegion = OMPD_parallel;
16088 case OMPD_target_update:
16089 case OMPD_target_enter_data:
16090 case OMPD_target_exit_data:
16092 case OMPD_target_simd:
16093 case OMPD_target_teams:
16094 case OMPD_target_parallel:
16095 case OMPD_target_teams_distribute:
16096 case OMPD_target_teams_distribute_simd:
16097 case OMPD_target_parallel_for:
16098 case OMPD_target_parallel_for_simd:
16099 case OMPD_target_teams_distribute_parallel_for:
16100 case OMPD_target_teams_distribute_parallel_for_simd:
16101 case OMPD_target_data:
16102 case OMPD_teams_distribute_parallel_for:
16103 case OMPD_teams_distribute_parallel_for_simd:
16105 case OMPD_teams_distribute:
16106 case OMPD_teams_distribute_simd:
16107 case OMPD_distribute_parallel_for:
16108 case OMPD_distribute_parallel_for_simd:
16110 case OMPD_parallel:
16111 case OMPD_parallel_master:
16112 case OMPD_parallel_masked:
16113 case OMPD_parallel_sections:
16114 case OMPD_parallel_for:
16115 case OMPD_parallel_for_simd:
16116 case OMPD_threadprivate:
16117 case OMPD_allocate:
16118 case OMPD_taskyield:
16121 case OMPD_taskwait:
16122 case OMPD_cancellation_point:
16126 case OMPD_declare_reduction:
16127 case OMPD_declare_mapper:
16128 case OMPD_declare_simd:
16129 case OMPD_declare_variant:
16130 case OMPD_begin_declare_variant:
16131 case OMPD_end_declare_variant:
16132 case OMPD_declare_target:
16133 case OMPD_end_declare_target:
16135 case OMPD_teams_loop:
16136 case OMPD_target_teams_loop:
16137 case OMPD_parallel_loop:
16138 case OMPD_target_parallel_loop:
16143 case OMPD_for_simd:
16144 case OMPD_sections:
16149 case OMPD_critical:
16150 case OMPD_taskgroup:
16151 case OMPD_distribute:
16154 case OMPD_distribute_simd:
16155 case OMPD_requires:
16156 case OMPD_metadirective:
16157 llvm_unreachable(
"Unexpected OpenMP directive with grainsize-clause");
16160 llvm_unreachable(
"Unknown OpenMP directive");
16163 case OMPC_novariants:
16164 case OMPC_nocontext:
16166 case OMPD_dispatch:
16167 CaptureRegion = OMPD_task;
16170 llvm_unreachable(
"Unexpected OpenMP directive");
16177 if (DKind == OMPD_metadirective) {
16178 CaptureRegion = OMPD_metadirective;
16179 }
else if (DKind == OMPD_unknown) {
16180 llvm_unreachable(
"Unknown OpenMP directive");
16182 llvm_unreachable(
"Unexpected OpenMP directive with when clause");
16185 case OMPC_firstprivate:
16186 case OMPC_lastprivate:
16187 case OMPC_reduction:
16188 case OMPC_task_reduction:
16189 case OMPC_in_reduction:
16192 case OMPC_proc_bind:
16196 case OMPC_allocator:
16197 case OMPC_collapse:
16202 case OMPC_copyprivate:
16206 case OMPC_mergeable:
16207 case OMPC_threadprivate:
16208 case OMPC_allocate:
16227 case OMPC_defaultmap:
16232 case OMPC_use_device_ptr:
16233 case OMPC_use_device_addr:
16234 case OMPC_is_device_ptr:
16235 case OMPC_unified_address:
16236 case OMPC_unified_shared_memory:
16237 case OMPC_reverse_offload:
16238 case OMPC_dynamic_allocators:
16239 case OMPC_atomic_default_mem_order:
16240 case OMPC_device_type:
16242 case OMPC_nontemporal:
16245 case OMPC_severity:
16249 case OMPC_inclusive:
16250 case OMPC_exclusive:
16251 case OMPC_uses_allocators:
16252 case OMPC_affinity:
16255 llvm_unreachable(
"Unexpected OpenMP clause.");
16257 return CaptureRegion;
16267 Stmt *HelperValStmt =
nullptr;
16270 !
Condition->isInstantiationDependent() &&
16271 !
Condition->containsUnexpandedParameterPack()) {
16276 ValExpr = Val.
get();
16280 DKind, OMPC_if,
LangOpts.OpenMP, NameModifier);
16283 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16284 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
16290 OMPIfClause(NameModifier, ValExpr, HelperValStmt, CaptureRegion, StartLoc,
16291 LParenLoc, NameModifierLoc, ColonLoc, EndLoc);
16299 Stmt *HelperValStmt =
nullptr;
16302 !
Condition->isInstantiationDependent() &&
16303 !
Condition->containsUnexpandedParameterPack()) {
16315 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16316 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
16322 StartLoc, LParenLoc, EndLoc);
16332 IntConvertDiagnoser()
16336 return S.
Diag(Loc, diag::err_omp_not_integral) << T;
16340 return S.
Diag(Loc, diag::err_omp_incomplete_type) << T;
16345 return S.
Diag(Loc, diag::err_omp_explicit_conversion) << T << ConvTy;
16354 return S.
Diag(Loc, diag::err_omp_ambiguous_conversion) << T;
16363 llvm_unreachable(
"conversion functions are permitted");
16365 } ConvertDiagnoser;
16371 bool StrictlyPositive,
bool BuildCapture =
false,
16374 Stmt **HelperValStmt =
nullptr) {
16380 if (
Value.isInvalid())
16383 ValExpr =
Value.get();
16385 if (std::optional<llvm::APSInt> Result =
16387 if (Result->isSigned() &&
16388 !((!StrictlyPositive && Result->isNonNegative()) ||
16389 (StrictlyPositive && Result->isStrictlyPositive()))) {
16390 SemaRef.
Diag(Loc, diag::err_omp_negative_expression_in_clause)
16391 << getOpenMPClauseName(CKind) << (StrictlyPositive ? 1 : 0)
16400 if (*CaptureRegion != OMPD_unknown &&
16403 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16404 ValExpr = tryBuildCapture(SemaRef, ValExpr, Captures).get();
16415 Expr *ValExpr = NumThreads;
16416 Stmt *HelperValStmt =
nullptr;
16429 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16430 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
16435 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
16440 bool StrictlyPositive,
16441 bool SuppressExprDiags) {
16450 if (SuppressExprDiags) {
16454 SuppressedDiagnoser() : VerifyICEDiagnoser(
true) {}
16457 llvm_unreachable(
"Diagnostic suppressed");
16467 if ((StrictlyPositive && !Result.isStrictlyPositive()) ||
16468 (!StrictlyPositive && !Result.isNonNegative())) {
16469 Diag(E->
getExprLoc(), diag::err_omp_negative_expression_in_clause)
16470 << getOpenMPClauseName(CKind) << (StrictlyPositive ? 1 : 0)
16474 if ((CKind == OMPC_aligned || CKind == OMPC_align) && !Result.isPowerOf2()) {
16475 Diag(E->
getExprLoc(), diag::warn_omp_alignment_not_power_of_two)
16479 if (CKind == OMPC_collapse &&
DSAStack->getAssociatedLoops() == 1)
16480 DSAStack->setAssociatedLoops(Result.getExtValue());
16481 else if (CKind == OMPC_ordered)
16482 DSAStack->setAssociatedLoops(Result.getExtValue());
16492 ExprResult Safelen = VerifyPositiveIntegerConstantInClause(Len, OMPC_safelen);
16505 ExprResult Simdlen = VerifyPositiveIntegerConstantInClause(Len, OMPC_simdlen);
16514 DSAStackTy *Stack) {
16515 if (!Stack->getOMPAllocatorHandleT().isNull())
16522 S.
Diag(Loc, diag::err_omp_implied_type_not_found)
16523 <<
"omp_allocator_handle_t";
16528 Stack->setOMPAllocatorHandleT(AllocatorHandleEnumTy);
16531 bool ErrorFound =
false;
16532 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
16533 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
16534 StringRef Allocator =
16535 OMPAllocateDeclAttr::ConvertAllocatorTypeTyToStr(AllocatorKind);
16537 auto *VD = dyn_cast_or_null<ValueDecl>(
16557 Stack->setAllocator(AllocatorKind, Res.
get());
16560 S.
Diag(Loc, diag::err_omp_implied_type_not_found)
16561 <<
"omp_allocator_handle_t";
16577 if (Allocator.isInvalid())
16580 DSAStack->getOMPAllocatorHandleT(),
16583 if (Allocator.isInvalid())
16599 VerifyPositiveIntegerConstantInClause(NumForLoops, OMPC_collapse);
16609 Expr *NumForLoops) {
16615 if (NumForLoops && LParenLoc.
isValid()) {
16617 VerifyPositiveIntegerConstantInClause(NumForLoops, OMPC_ordered);
16620 NumForLoops = NumForLoopsResult.
get();
16622 NumForLoops =
nullptr;
16625 Context, NumForLoops, NumForLoops ?
DSAStack->getAssociatedLoops() : 0,
16626 StartLoc, LParenLoc, EndLoc);
16627 DSAStack->setOrderedRegion(
true, NumForLoops, Clause);
16638 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
16640 case OMPC_proc_bind:
16642 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
16644 case OMPC_atomic_default_mem_order:
16647 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
16651 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
16655 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
16659 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
16661 case OMPC_severity:
16664 LParenLoc, EndLoc);
16668 case OMPC_num_threads:
16672 case OMPC_allocator:
16673 case OMPC_collapse:
16674 case OMPC_schedule:
16676 case OMPC_firstprivate:
16677 case OMPC_lastprivate:
16679 case OMPC_reduction:
16680 case OMPC_task_reduction:
16681 case OMPC_in_reduction:
16685 case OMPC_copyprivate:
16689 case OMPC_mergeable:
16690 case OMPC_threadprivate:
16691 case OMPC_allocate:
16708 case OMPC_num_teams:
16709 case OMPC_thread_limit:
16710 case OMPC_priority:
16711 case OMPC_grainsize:
16713 case OMPC_num_tasks:
16715 case OMPC_dist_schedule:
16716 case OMPC_defaultmap:
16721 case OMPC_use_device_ptr:
16722 case OMPC_use_device_addr:
16723 case OMPC_is_device_ptr:
16724 case OMPC_has_device_addr:
16725 case OMPC_unified_address:
16726 case OMPC_unified_shared_memory:
16727 case OMPC_reverse_offload:
16728 case OMPC_dynamic_allocators:
16729 case OMPC_device_type:
16731 case OMPC_nontemporal:
16733 case OMPC_novariants:
16734 case OMPC_nocontext:
16736 case OMPC_inclusive:
16737 case OMPC_exclusive:
16738 case OMPC_uses_allocators:
16739 case OMPC_affinity:
16743 llvm_unreachable(
"Clause is not allowed.");
16752 llvm::raw_svector_ostream Out(Buffer);
16753 unsigned Skipped = Exclude.size();
16754 for (
unsigned I =
First; I <
Last; ++I) {
16755 if (llvm::is_contained(Exclude, I)) {
16760 if (I + Skipped + 2 ==
Last)
16762 else if (I + Skipped + 1 !=
Last)
16773 if (
Kind == OMP_DEFAULT_unknown) {
16774 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16776 unsigned(OMP_DEFAULT_unknown))
16777 << getOpenMPClauseName(OMPC_default);
16782 case OMP_DEFAULT_none:
16783 DSAStack->setDefaultDSANone(KindKwLoc);
16785 case OMP_DEFAULT_shared:
16786 DSAStack->setDefaultDSAShared(KindKwLoc);
16788 case OMP_DEFAULT_firstprivate:
16789 DSAStack->setDefaultDSAFirstPrivate(KindKwLoc);
16791 case OMP_DEFAULT_private:
16792 DSAStack->setDefaultDSAPrivate(KindKwLoc);
16795 llvm_unreachable(
"DSA unexpected in OpenMP default clause");
16807 if (
Kind == OMP_PROC_BIND_unknown) {
16808 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16810 unsigned(OMP_PROC_BIND_master),
16813 ? OMP_PROC_BIND_primary
16814 : OMP_PROC_BIND_spread) +
16816 << getOpenMPClauseName(OMPC_proc_bind);
16819 if (
Kind == OMP_PROC_BIND_primary &&
LangOpts.OpenMP < 51)
16820 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16822 unsigned(OMP_PROC_BIND_master),
16824 unsigned(OMP_PROC_BIND_spread) + 1)
16825 << getOpenMPClauseName(OMPC_proc_bind);
16834 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16836 OMPC_atomic_default_mem_order, 0,
16838 << getOpenMPClauseName(OMPC_atomic_default_mem_order);
16842 LParenLoc, EndLoc);
16851 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16854 << getOpenMPClauseName(OMPC_at);
16867 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16870 << getOpenMPClauseName(OMPC_severity);
16880 assert(ME &&
"NULL expr in Message clause");
16881 if (!isa<StringLiteral>(ME)) {
16883 << getOpenMPClauseName(OMPC_message);
16893 if (
Kind != OMPC_ORDER_concurrent ||
16898 "OMPC_ORDER_unknown not greater than 0");
16900 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
16904 << getOpenMPClauseName(OMPC_order);
16909 Diag(MLoc, diag::err_omp_unexpected_clause_value)
16913 << getOpenMPClauseName(OMPC_order);
16915 DSAStack->setRegionHasOrderConcurrent(
true);
16918 unsigned existingFlags =
DSAStack->getCurScope()->getFlags();
16919 DSAStack->getCurScope()->setFlags(existingFlags |
16925 EndLoc, Modifier, MLoc);
16934 Kind == OMPC_DEPEND_sink ||
Kind == OMPC_DEPEND_depobj) {
16936 OMPC_DEPEND_source, OMPC_DEPEND_sink, OMPC_DEPEND_depobj,
16937 OMPC_DEPEND_outallmemory, OMPC_DEPEND_inoutallmemory};
16939 Except.push_back(OMPC_DEPEND_inoutset);
16940 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
16943 << getOpenMPClauseName(OMPC_update);
16954 for (
Expr *SizeExpr : SizeExprs) {
16955 ExprResult NumForLoopsResult = VerifyPositiveIntegerConstantInClause(
16956 SizeExpr, OMPC_sizes,
true);
16957 if (!NumForLoopsResult.
isUsable())
16961 DSAStack->setAssociatedLoops(SizeExprs.size());
16978 ExprResult FactorResult = VerifyPositiveIntegerConstantInClause(
16979 FactorExpr, OMPC_partial,
true);
16982 FactorExpr = FactorResult.
get();
16993 AlignVal = VerifyPositiveIntegerConstantInClause(A, OMPC_align);
17007 case OMPC_schedule:
17008 enum { Modifier1, Modifier2, ScheduleKind, NumberOfElements };
17009 assert(Argument.size() == NumberOfElements &&
17010 ArgumentLoc.size() == NumberOfElements);
17015 StartLoc, LParenLoc, ArgumentLoc[Modifier1], ArgumentLoc[Modifier2],
17016 ArgumentLoc[ScheduleKind], DelimLoc, EndLoc);
17019 assert(Argument.size() == 1 && ArgumentLoc.size() == 1);
17021 Expr, StartLoc, LParenLoc, ArgumentLoc.back(),
17024 case OMPC_dist_schedule:
17027 StartLoc, LParenLoc, ArgumentLoc.back(), DelimLoc, EndLoc);
17029 case OMPC_defaultmap:
17030 enum { Modifier, DefaultmapKind };
17034 StartLoc, LParenLoc, ArgumentLoc[Modifier], ArgumentLoc[DefaultmapKind],
17038 enum { OrderModifier, OrderKind };
17042 LParenLoc, ArgumentLoc[OrderModifier], ArgumentLoc[OrderKind], EndLoc);
17045 assert(Argument.size() == 1 && ArgumentLoc.size() == 1);
17048 StartLoc, LParenLoc, ArgumentLoc.back(), EndLoc);
17050 case OMPC_grainsize:
17051 assert(Argument.size() == 1 && ArgumentLoc.size() == 1 &&
17052 "Modifier for grainsize clause and its location are expected.");
17055 StartLoc, LParenLoc, ArgumentLoc.back(), EndLoc);
17057 case OMPC_num_tasks:
17058 assert(Argument.size() == 1 && ArgumentLoc.size() == 1 &&
17059 "Modifier for num_tasks clause and its location are expected.");
17062 StartLoc, LParenLoc, ArgumentLoc.back(), EndLoc);
17065 case OMPC_num_threads:
17069 case OMPC_allocator:
17070 case OMPC_collapse:
17072 case OMPC_proc_bind:
17074 case OMPC_firstprivate:
17075 case OMPC_lastprivate:
17077 case OMPC_reduction:
17078 case OMPC_task_reduction:
17079 case OMPC_in_reduction:
17083 case OMPC_copyprivate:
17087 case OMPC_mergeable:
17088 case OMPC_threadprivate:
17089 case OMPC_allocate:
17106 case OMPC_num_teams:
17107 case OMPC_thread_limit:
17108 case OMPC_priority:
17115 case OMPC_use_device_ptr:
17116 case OMPC_use_device_addr:
17117 case OMPC_is_device_ptr:
17118 case OMPC_has_device_addr:
17119 case OMPC_unified_address:
17120 case OMPC_unified_shared_memory:
17121 case OMPC_reverse_offload:
17122 case OMPC_dynamic_allocators:
17123 case OMPC_atomic_default_mem_order:
17124 case OMPC_device_type:
17126 case OMPC_nontemporal:
17128 case OMPC_severity:
17131 case OMPC_novariants:
17132 case OMPC_nocontext:
17134 case OMPC_inclusive:
17135 case OMPC_exclusive:
17136 case OMPC_uses_allocators:
17137 case OMPC_affinity:
17141 llvm_unreachable(
"Clause is not allowed.");
17152 Excluded.push_back(M2);
17153 if (M2 == OMPC_SCHEDULE_MODIFIER_nonmonotonic)
17154 Excluded.push_back(OMPC_SCHEDULE_MODIFIER_monotonic);
17155 if (M2 == OMPC_SCHEDULE_MODIFIER_monotonic)
17156 Excluded.push_back(OMPC_SCHEDULE_MODIFIER_nonmonotonic);
17157 S.
Diag(M1Loc, diag::err_omp_unexpected_clause_value)
17162 << getOpenMPClauseName(OMPC_schedule);
17180 (M1 == OMPC_SCHEDULE_MODIFIER_monotonic &&
17181 M2 == OMPC_SCHEDULE_MODIFIER_nonmonotonic) ||
17182 (M1 == OMPC_SCHEDULE_MODIFIER_nonmonotonic &&
17183 M2 == OMPC_SCHEDULE_MODIFIER_monotonic)) {
17184 Diag(M2Loc, diag::err_omp_unexpected_schedule_modifier)
17200 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
17201 << Values << getOpenMPClauseName(OMPC_schedule);
17209 (M1 == OMPC_SCHEDULE_MODIFIER_nonmonotonic ||
17210 M2 == OMPC_SCHEDULE_MODIFIER_nonmonotonic) &&
17211 Kind != OMPC_SCHEDULE_dynamic &&
Kind != OMPC_SCHEDULE_guided) {
17212 Diag(M1 == OMPC_SCHEDULE_MODIFIER_nonmonotonic ? M1Loc : M2Loc,
17213 diag::err_omp_schedule_nonmonotonic_static);
17216 Expr *ValExpr = ChunkSize;
17217 Stmt *HelperValStmt =
nullptr;
17228 ValExpr = Val.
get();
17233 if (std::optional<llvm::APSInt> Result =
17235 if (Result->isSigned() && !Result->isStrictlyPositive()) {
17236 Diag(ChunkSizeLoc, diag::err_omp_negative_expression_in_clause)
17241 DSAStack->getCurrentDirective(), OMPC_schedule,
17242 LangOpts.OpenMP) != OMPD_unknown &&
17245 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
17246 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
17254 ValExpr, HelperValStmt, M1, M1Loc, M2, M2Loc);
17271 case OMPC_mergeable:
17313 case OMPC_unified_address:
17316 case OMPC_unified_shared_memory:
17319 case OMPC_reverse_offload:
17322 case OMPC_dynamic_allocators:
17338 case OMPC_num_threads:
17342 case OMPC_allocator:
17343 case OMPC_collapse:
17344 case OMPC_schedule:
17346 case OMPC_firstprivate:
17347 case OMPC_lastprivate:
17349 case OMPC_reduction:
17350 case OMPC_task_reduction:
17351 case OMPC_in_reduction:
17355 case OMPC_copyprivate:
17357 case OMPC_proc_bind:
17358 case OMPC_threadprivate:
17359 case OMPC_allocate:
17365 case OMPC_num_teams:
17366 case OMPC_thread_limit:
17367 case OMPC_priority:
17368 case OMPC_grainsize:
17369 case OMPC_num_tasks:
17371 case OMPC_dist_schedule:
17372 case OMPC_defaultmap:
17377 case OMPC_use_device_ptr:
17378 case OMPC_use_device_addr:
17379 case OMPC_is_device_ptr:
17380 case OMPC_has_device_addr:
17381 case OMPC_atomic_default_mem_order:
17382 case OMPC_device_type:
17384 case OMPC_nontemporal:
17387 case OMPC_severity:
17389 case OMPC_novariants:
17390 case OMPC_nocontext:
17392 case OMPC_inclusive:
17393 case OMPC_exclusive:
17394 case OMPC_uses_allocators:
17395 case OMPC_affinity:
17397 case OMPC_ompx_dyn_cgroup_mem:
17399 llvm_unreachable(
"Clause is not allowed.");
17512 if (!
hasClauses(Clauses, OMPC_init, OMPC_use, OMPC_destroy, OMPC_nowait)) {
17513 StringRef
Expected =
"'init', 'use', 'destroy', or 'nowait'";
17514 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
17515 <<
Expected << getOpenMPDirectiveName(OMPD_interop);
17528 bool HasInitClause =
false;
17529 bool IsTargetSync =
false;
17533 if (
const auto *InitClause = dyn_cast<OMPInitClause>(C)) {
17534 HasInitClause =
true;
17535 if (InitClause->getIsTargetSync())
17536 IsTargetSync =
true;
17537 }
else if (
const auto *DC = dyn_cast<OMPDependClause>(C)) {
17541 if (DependClause && HasInitClause && !IsTargetSync) {
17542 Diag(DependClause->
getBeginLoc(), diag::err_omp_interop_bad_depend_clause);
17556 if (ClauseKind == OMPC_init) {
17557 auto *E = cast<OMPInitClause>(C)->getInteropVar();
17559 }
else if (ClauseKind == OMPC_use) {
17560 auto *E = cast<OMPUseClause>(C)->getInteropVar();
17562 }
else if (ClauseKind == OMPC_destroy) {
17563 auto *E = cast<OMPDestroyClause>(C)->getInteropVar();
17568 if (!InteropVars.insert(
DeclResult.first).second) {
17569 Diag(ELoc, diag::err_omp_interop_var_multiple_actions)
17584 Expr *RefExpr = InteropVarExpr;
17587 false,
"omp_interop_t");
17598 bool HasError =
false;
17604 if (
const auto *TD = dyn_cast<TypeDecl>(ND)) {
17605 InteropType =
QualType(TD->getTypeForDecl(), 0);
17614 SemaRef.
Diag(VarLoc, diag::err_omp_implied_type_not_found)
17615 <<
"omp_interop_t";
17621 SemaRef.
Diag(VarLoc, diag::err_omp_interop_variable_wrong_type);
17627 if ((
Kind == OMPC_init ||
Kind == OMPC_destroy) &&
17629 SemaRef.
Diag(VarLoc, diag::err_omp_interop_variable_expected)
17652 if (isa<StringLiteral>(E))
17659 LParenLoc, VarLoc, EndLoc);
17671 OMPUseClause(InteropVar, StartLoc, LParenLoc, VarLoc, EndLoc);
17679 if (!InteropVar &&
LangOpts.OpenMP >= 52 &&
17680 DSAStack->getCurrentDirective() == OMPD_depobj) {
17681 Diag(StartLoc, diag::err_omp_expected_clause_argument)
17682 << getOpenMPClauseName(OMPC_destroy)
17683 << getOpenMPDirectiveName(OMPD_depobj);
17699 Stmt *HelperValStmt =
nullptr;
17702 !
Condition->isInstantiationDependent() &&
17703 !
Condition->containsUnexpandedParameterPack()) {
17715 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
17716 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
17722 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
17730 Stmt *HelperValStmt =
nullptr;
17733 !
Condition->isInstantiationDependent() &&
17734 !
Condition->containsUnexpandedParameterPack()) {
17746 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
17747 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
17753 StartLoc, LParenLoc, EndLoc);
17760 Expr *ValExpr = ThreadID;
17761 Stmt *HelperValStmt =
nullptr;
17768 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
17769 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
17774 StartLoc, LParenLoc, EndLoc);
17785 int ExtraModifier =
Data.ExtraModifier;
17792 case OMPC_firstprivate:
17795 case OMPC_lastprivate:
17797 "Unexpected lastprivate modifier.");
17800 ExtraModifierLoc, ColonLoc, StartLoc, LParenLoc, EndLoc);
17805 case OMPC_reduction:
17807 "Unexpected lastprivate modifier.");
17810 StartLoc, LParenLoc, ExtraModifierLoc, ColonLoc, EndLoc,
17811 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId);
17813 case OMPC_task_reduction:
17815 VarList, StartLoc, LParenLoc, ColonLoc, EndLoc,
17816 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId);
17818 case OMPC_in_reduction:
17820 VarList, StartLoc, LParenLoc, ColonLoc, EndLoc,
17821 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId);
17825 "Unexpected linear modifier.");
17827 VarList,
Data.DepModOrTailExpr, StartLoc, LParenLoc,
17833 LParenLoc, ColonLoc, EndLoc);
17838 case OMPC_copyprivate:
17846 "Unexpected depend modifier.");
17849 ColonLoc,
Data.OmpAllMemoryLoc},
17850 Data.DepModOrTailExpr, VarList, StartLoc, LParenLoc, EndLoc);
17854 "Unexpected map modifier.");
17856 Data.IteratorExpr,
Data.MapTypeModifiers,
Data.MapTypeModifiersLoc,
17857 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId,
17859 ExtraModifierLoc, ColonLoc, VarList, Locs);
17864 Data.ReductionOrMapperIdScopeSpec,
17865 Data.ReductionOrMapperId, ColonLoc, VarList, Locs);
17869 Data.ReductionOrMapperIdScopeSpec,
17870 Data.ReductionOrMapperId, ColonLoc, VarList,
17873 case OMPC_use_device_ptr:
17876 case OMPC_use_device_addr:
17879 case OMPC_is_device_ptr:
17882 case OMPC_has_device_addr:
17885 case OMPC_allocate:
17887 LParenLoc, ColonLoc, EndLoc);
17889 case OMPC_nontemporal:
17892 case OMPC_inclusive:
17895 case OMPC_exclusive:
17898 case OMPC_affinity:
17900 Data.DepModOrTailExpr, VarList);
17905 case OMPC_num_threads:
17909 case OMPC_allocator:
17910 case OMPC_collapse:
17912 case OMPC_proc_bind:
17913 case OMPC_schedule:
17917 case OMPC_mergeable:
17918 case OMPC_threadprivate:
17932 case OMPC_num_teams:
17933 case OMPC_thread_limit:
17934 case OMPC_priority:
17935 case OMPC_grainsize:
17937 case OMPC_num_tasks:
17939 case OMPC_dist_schedule:
17940 case OMPC_defaultmap:
17943 case OMPC_unified_address:
17944 case OMPC_unified_shared_memory:
17945 case OMPC_reverse_offload:
17946 case OMPC_dynamic_allocators:
17947 case OMPC_atomic_default_mem_order:
17948 case OMPC_device_type:
17952 case OMPC_severity:
17955 case OMPC_novariants:
17956 case OMPC_nocontext:
17958 case OMPC_uses_allocators:
17962 llvm_unreachable(
"Clause is not allowed.");
17992 bool IsImplicitClause =
17994 for (
Expr *RefExpr : VarList) {
17995 assert(RefExpr &&
"NULL expr in OpenMP private clause.");
17998 Expr *SimpleRefExpr = RefExpr;
18002 Vars.push_back(RefExpr);
18003 PrivateCopies.push_back(
nullptr);
18010 auto *VD = dyn_cast<VarDecl>(D);
18017 Type =
Type.getNonReferenceType();
18037 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
18038 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_private) {
18039 Diag(ELoc, diag::err_omp_wrong_dsa) << getOpenMPClauseName(DVar.CKind)
18040 << getOpenMPClauseName(OMPC_private);
18049 Diag(ELoc, diag::err_omp_variably_modified_type_not_supported)
18050 << getOpenMPClauseName(OMPC_private) <<
Type
18051 << getOpenMPDirectiveName(CurrDir);
18052 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
18055 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
18069 CurrDir == OMPD_target) {
18071 if (
DSAStack->checkMappableExprComponentListsForDecl(
18075 ConflictKind = WhereFoundClauseKind;
18078 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
18079 << getOpenMPClauseName(OMPC_private)
18080 << getOpenMPClauseName(ConflictKind)
18081 << getOpenMPDirectiveName(CurrDir);
18100 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
18109 auto *FD = dyn_cast<FieldDecl>(D);
18113 RefExpr->getExprLoc());
18117 if (!IsImplicitClause)
18118 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_private, Ref);
18120 ? RefExpr->IgnoreParens()
18122 PrivateCopies.push_back(VDPrivateRefExpr);
18140 bool IsImplicitClause =
18144 for (
Expr *RefExpr : VarList) {
18145 assert(RefExpr &&
"NULL expr in OpenMP firstprivate clause.");
18148 Expr *SimpleRefExpr = RefExpr;
18152 Vars.push_back(RefExpr);
18153 PrivateCopies.push_back(
nullptr);
18154 Inits.push_back(
nullptr);
18160 ELoc = IsImplicitClause ? ImplicitClauseLoc : ELoc;
18162 auto *VD = dyn_cast<VarDecl>(D);
18168 diag::err_omp_firstprivate_incomplete_type))
18170 Type =
Type.getNonReferenceType();
18179 DSAStackTy::DSAVarData TopDVar;
18180 if (!IsImplicitClause) {
18181 DSAStackTy::DSAVarData DVar =
18193 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_firstprivate &&
18195 DVar.CKind != OMPC_lastprivate) &&
18197 Diag(ELoc, diag::err_omp_wrong_dsa)
18198 << getOpenMPClauseName(DVar.CKind)
18199 << getOpenMPClauseName(OMPC_firstprivate);
18215 if (!(IsConstant || (VD && VD->isStaticDataMember())) && !DVar.RefExpr &&
18216 DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_shared) {
18217 Diag(ELoc, diag::err_omp_wrong_dsa)
18218 << getOpenMPClauseName(DVar.CKind)
18219 << getOpenMPClauseName(OMPC_firstprivate);
18243 DVar =
DSAStack->getImplicitDSA(D,
true);
18244 if (DVar.CKind != OMPC_shared &&
18247 DVar.DKind == OMPD_unknown)) {
18248 Diag(ELoc, diag::err_omp_required_access)
18249 << getOpenMPClauseName(OMPC_firstprivate)
18250 << getOpenMPClauseName(OMPC_shared);
18270 return C == OMPC_reduction && !AppliedToPointee;
18278 if (DVar.CKind == OMPC_reduction &&
18282 Diag(ELoc, diag::err_omp_parallel_reduction_in_task_firstprivate)
18283 << getOpenMPDirectiveName(DVar.DKind);
18299 CurrDir == OMPD_target) {
18301 if (
DSAStack->checkMappableExprComponentListsForDecl(
18306 ConflictKind = WhereFoundClauseKind;
18309 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
18310 << getOpenMPClauseName(OMPC_firstprivate)
18311 << getOpenMPClauseName(ConflictKind)
18312 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
18322 Diag(ELoc, diag::err_omp_variably_modified_type_not_supported)
18323 << getOpenMPClauseName(OMPC_firstprivate) <<
Type
18324 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
18325 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
18328 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
18337 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
18343 Expr *VDInitRefExpr =
nullptr;
18353 ".firstprivate.temp");
18360 if (Result.isInvalid())
18368 ".firstprivate.temp");
18370 RefExpr->getExprLoc());
18376 if (IsImplicitClause) {
18377 Diag(RefExpr->getExprLoc(),
18378 diag::note_omp_task_predetermined_firstprivate_here);
18385 RefExpr->getExprLoc());
18388 if (TopDVar.CKind == OMPC_lastprivate) {
18389 Ref = TopDVar.PrivateCopy;
18391 auto *FD = dyn_cast<FieldDecl>(D);
18395 RefExpr->getExprLoc());
18399 ExprCaptures.push_back(Ref->
getDecl());
18402 if (!IsImplicitClause)
18403 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_firstprivate, Ref);
18405 ? RefExpr->IgnoreParens()
18407 PrivateCopies.push_back(VDPrivateRefExpr);
18408 Inits.push_back(VDInitRefExpr);
18415 Vars, PrivateCopies, Inits,
18424 assert(ColonLoc.
isValid() &&
"Colon location must be valid.");
18425 Diag(LPKindLoc, diag::err_omp_unexpected_clause_value)
18428 << getOpenMPClauseName(OMPC_lastprivate);
18438 for (
Expr *RefExpr : VarList) {
18439 assert(RefExpr &&
"NULL expr in OpenMP lastprivate clause.");
18442 Expr *SimpleRefExpr = RefExpr;
18446 Vars.push_back(RefExpr);
18447 SrcExprs.push_back(
nullptr);
18448 DstExprs.push_back(
nullptr);
18449 AssignmentOps.push_back(
nullptr);
18456 auto *VD = dyn_cast<VarDecl>(D);
18462 diag::err_omp_lastprivate_incomplete_type))
18464 Type =
Type.getNonReferenceType();
18481 Diag(ELoc, diag::err_omp_lastprivate_conditional_non_scalar);
18482 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
18485 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
18499 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
18500 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_lastprivate &&
18502 DVar.CKind != OMPC_firstprivate) &&
18503 (DVar.CKind != OMPC_private || DVar.RefExpr !=
nullptr)) {
18504 Diag(ELoc, diag::err_omp_wrong_dsa)
18505 << getOpenMPClauseName(DVar.CKind)
18506 << getOpenMPClauseName(OMPC_lastprivate);
18517 DSAStackTy::DSAVarData TopDVar = DVar;
18521 DVar =
DSAStack->getImplicitDSA(D,
true);
18522 if (DVar.CKind != OMPC_shared) {
18523 Diag(ELoc, diag::err_omp_required_access)
18524 << getOpenMPClauseName(OMPC_lastprivate)
18525 << getOpenMPClauseName(OMPC_shared);
18541 Type.getUnqualifiedType(),
".lastprivate.src",
18552 PseudoDstExpr, PseudoSrcExpr);
18562 if (TopDVar.CKind == OMPC_firstprivate) {
18563 Ref = TopDVar.PrivateCopy;
18567 ExprCaptures.push_back(Ref->
getDecl());
18569 if ((TopDVar.CKind == OMPC_firstprivate && !TopDVar.PrivateCopy) ||
18580 ExprPostUpdates.push_back(
18584 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_lastprivate, Ref);
18586 ? RefExpr->IgnoreParens()
18588 SrcExprs.push_back(PseudoSrcExpr);
18589 DstExprs.push_back(PseudoDstExpr);
18590 AssignmentOps.push_back(AssignmentOp.
get());
18597 Vars, SrcExprs, DstExprs, AssignmentOps,
18598 LPKind, LPKindLoc, ColonLoc,
18608 for (
Expr *RefExpr : VarList) {
18609 assert(RefExpr &&
"NULL expr in OpenMP lastprivate clause.");
18612 Expr *SimpleRefExpr = RefExpr;
18616 Vars.push_back(RefExpr);
18622 auto *VD = dyn_cast<VarDecl>(D);
18630 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
18631 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_shared &&
18633 Diag(ELoc, diag::err_omp_wrong_dsa) << getOpenMPClauseName(DVar.CKind)
18634 << getOpenMPClauseName(OMPC_shared);
18642 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_shared, Ref);
18644 ? RefExpr->IgnoreParens()
18655 class DSARefChecker :
public StmtVisitor<DSARefChecker, bool> {
18660 if (
auto *VD = dyn_cast<VarDecl>(E->
getDecl())) {
18661 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(VD,
false);
18662 if (DVar.CKind == OMPC_shared && !DVar.RefExpr)
18664 if (DVar.CKind != OMPC_unknown)
18666 DSAStackTy::DSAVarData DVarPrivate = Stack->hasDSA(
18673 return DVarPrivate.CKind != OMPC_unknown;
18677 bool VisitStmt(
Stmt *S) {
18678 for (
Stmt *Child : S->children()) {
18679 if (Child && Visit(Child))
18684 explicit DSARefChecker(DSAStackTy *S) : Stack(S) {}
18691 class TransformExprToCaptures :
public TreeTransform<TransformExprToCaptures> {
18698 : BaseTransform(SemaRef),
Field(
FieldDecl), CapturedExpr(nullptr) {}
18703 CapturedExpr =
buildCapture(SemaRef, Field, E,
false);
18704 return CapturedExpr;
18706 return BaseTransform::TransformMemberExpr(E);
18708 DeclRefExpr *getCapturedExpr() {
return CapturedExpr; }
18712 template <
typename T,
typename U>
18715 for (
U &Set : Lookups) {
18716 for (
auto *D : Set) {
18717 if (T Res = Gen(cast<ValueDecl>(D)))
18727 for (
auto *RD : D->
redecls()) {
18732 auto ND = cast<NamedDecl>(RD);
18750 AssociatedClasses);
18763 for (
auto *NS : AssociatedNamespaces) {
18776 for (
auto *D : R) {
18777 auto *Underlying = D;
18778 if (
auto *USD = dyn_cast<UsingShadowDecl>(D))
18779 Underlying = USD->getTargetDecl();
18781 if (!isa<OMPDeclareReductionDecl>(Underlying) &&
18782 !isa<OMPDeclareMapperDecl>(Underlying))
18789 if (
auto *USD = dyn_cast<UsingShadowDecl>(D))
18790 Underlying = USD->getTargetDecl();
18792 Lookups.emplace_back();
18793 Lookups.back().addDecl(Underlying);
18812 S = S->getParent();
18813 }
while (S && !S->isDeclScope(D));
18815 S = S->getParent();
18816 Lookups.emplace_back();
18817 Lookups.back().append(Lookup.
begin(), Lookup.
end());
18820 }
else if (
auto *ULE =
18821 cast_or_null<UnresolvedLookupExpr>(UnresolvedReduction)) {
18823 Decl *PrevD =
nullptr;
18827 else if (
auto *DRD = dyn_cast<OMPDeclareReductionDecl>(D))
18828 Lookups.back().addDecl(DRD);
18835 filterLookupForUDReductionAndMapper<bool>(Lookups, [](
ValueDecl *D) {
18836 return !D->isInvalidDecl() &&
18837 (D->getType()->isDependentType() ||
18838 D->getType()->isInstantiationDependentType() ||
18839 D->getType()->containsUnexpandedParameterPack());
18845 ResSet.
append(Set.begin(), Set.end());
18847 ResSet.
addDecl(Set[Set.size() - 1]);
18852 true,
true, ResSet.
begin(), ResSet.
end());
18872 if (SemaRef.
isCompleteType(Loc, Ty) || TyRec->isBeingDefined() ||
18873 TyRec->getDecl()->getDefinition()) {
18876 if (Lookup.
empty()) {
18877 Lookups.emplace_back();
18878 Lookups.back().append(Lookup.
begin(), Lookup.
end());
18885 if (
auto *VD = filterLookupForUDReductionAndMapper<ValueDecl *>(
18895 if (
auto *VD = filterLookupForUDReductionAndMapper<ValueDecl *>(
18909 Loc, VD->
getType(), Ty, Paths.front(),
18919 if (ReductionIdScopeSpec.
isSet()) {
18920 SemaRef.
Diag(Loc, diag::err_omp_not_resolved_reduction_identifier)
18929 struct ReductionData {
18954 unsigned RedModifier = 0;
18955 ReductionData() =
delete;
18957 ReductionData(
unsigned Size,
unsigned Modifier = 0) : RedModifier(Modifier) {
18958 Vars.reserve(Size);
18959 Privates.reserve(Size);
18960 LHSs.reserve(Size);
18961 RHSs.reserve(Size);
18962 ReductionOps.reserve(Size);
18963 if (RedModifier == OMPC_REDUCTION_inscan) {
18964 InscanCopyOps.reserve(Size);
18965 InscanCopyArrayTemps.reserve(Size);
18966 InscanCopyArrayElems.reserve(Size);
18968 TaskgroupDescriptors.reserve(Size);
18969 ExprCaptures.reserve(Size);
18970 ExprPostUpdates.reserve(Size);
18974 void push(
Expr *Item,
Expr *ReductionOp) {
18975 Vars.emplace_back(Item);
18976 Privates.emplace_back(
nullptr);
18977 LHSs.emplace_back(
nullptr);
18978 RHSs.emplace_back(
nullptr);
18979 ReductionOps.emplace_back(ReductionOp);
18980 TaskgroupDescriptors.emplace_back(
nullptr);
18981 if (RedModifier == OMPC_REDUCTION_inscan) {
18982 InscanCopyOps.push_back(
nullptr);
18983 InscanCopyArrayTemps.push_back(
nullptr);
18984 InscanCopyArrayElems.push_back(
nullptr);
18989 Expr *TaskgroupDescriptor,
Expr *CopyOp,
Expr *CopyArrayTemp,
18990 Expr *CopyArrayElem) {
18991 Vars.emplace_back(Item);
18992 Privates.emplace_back(Private);
18993 LHSs.emplace_back(LHS);
18994 RHSs.emplace_back(RHS);
18995 ReductionOps.emplace_back(ReductionOp);
18996 TaskgroupDescriptors.emplace_back(TaskgroupDescriptor);
18997 if (RedModifier == OMPC_REDUCTION_inscan) {
18998 InscanCopyOps.push_back(CopyOp);
18999 InscanCopyArrayTemps.push_back(CopyArrayTemp);
19000 InscanCopyArrayElems.push_back(CopyArrayElem);
19002 assert(CopyOp ==
nullptr && CopyArrayTemp ==
nullptr &&
19003 CopyArrayElem ==
nullptr &&
19004 "Copy operation must be used for inscan reductions only.");
19014 if (Length ==
nullptr) {
19021 SingleElement =
true;
19022 ArraySizes.push_back(llvm::APSInt::get(1));
19028 llvm::APSInt ConstantLengthValue = Result.Val.getInt();
19029 SingleElement = (ConstantLengthValue.getSExtValue() == 1);
19030 ArraySizes.push_back(ConstantLengthValue);
19038 while (
const auto *TempOASE = dyn_cast<OMPArraySectionExpr>(
Base)) {
19039 Length = TempOASE->getLength();
19040 if (Length ==
nullptr) {
19047 ArraySizes.push_back(llvm::APSInt::get(1));
19053 llvm::APSInt ConstantLengthValue = Result.Val.getInt();
19054 if (ConstantLengthValue.getSExtValue() != 1)
19057 ArraySizes.push_back(ConstantLengthValue);
19059 Base = TempOASE->getBase()->IgnoreParenImpCasts();
19063 if (!SingleElement) {
19064 while (
const auto *TempASE = dyn_cast<ArraySubscriptExpr>(
Base)) {
19066 ArraySizes.push_back(llvm::APSInt::get(1));
19067 Base = TempASE->getBase()->IgnoreParenImpCasts();
19079 return BO_AddAssign;
19081 return BO_MulAssign;
19083 return BO_AndAssign;
19085 return BO_OrAssign;
19087 return BO_XorAssign;
19135 case OO_Array_Delete:
19144 case OO_GreaterEqual:
19146 case OO_MinusEqual:
19148 case OO_SlashEqual:
19149 case OO_PercentEqual:
19150 case OO_CaretEqual:
19154 case OO_GreaterGreater:
19155 case OO_LessLessEqual:
19156 case OO_GreaterGreaterEqual:
19157 case OO_EqualEqual:
19158 case OO_ExclaimEqual:
19161 case OO_MinusMinus:
19167 case OO_Conditional:
19170 llvm_unreachable(
"Unexpected reduction identifier");
19173 if (II->isStr(
"max"))
19175 else if (II->isStr(
"min"))
19181 if (ReductionIdScopeSpec.
isValid())
19187 auto IR = UnresolvedReductions.begin(), ER = UnresolvedReductions.end();
19188 bool FirstIter =
true;
19189 for (
Expr *RefExpr : VarList) {
19190 assert(RefExpr &&
"nullptr expr in OpenMP reduction clause.");
19198 if (!FirstIter && IR != ER)
19203 Expr *SimpleRefExpr = RefExpr;
19212 S, ELoc, ERange, Stack->getCurScope(), ReductionIdScopeSpec,
19213 ReductionId,
Type, BasePath, IR == ER ?
nullptr : *IR);
19214 Expr *ReductionOp =
nullptr;
19216 (DeclareReductionRef.
isUnset() ||
19217 isa<UnresolvedLookupExpr>(DeclareReductionRef.
get())))
19218 ReductionOp = DeclareReductionRef.
get();
19220 RD.push(RefExpr, ReductionOp);
19226 Expr *TaskgroupDescriptor =
nullptr;
19228 auto *ASE = dyn_cast<ArraySubscriptExpr>(RefExpr->IgnoreParens());
19229 auto *OASE = dyn_cast<OMPArraySectionExpr>(RefExpr->IgnoreParens());
19231 Type = ASE->getType().getNonReferenceType();
19236 Type = ATy->getElementType();
19239 Type =
Type.getNonReferenceType();
19243 auto *VD = dyn_cast<VarDecl>(D);
19249 diag::err_omp_reduction_incomplete_type))
19255 false, ASE || OASE))
19262 if (!ASE && !OASE) {
19264 VarDecl *VDDef = VD->getDefinition();
19266 DSARefChecker Check(Stack);
19267 if (Check.Visit(VDDef->
getInit())) {
19268 S.
Diag(ELoc, diag::err_omp_reduction_ref_type_arg)
19269 << getOpenMPClauseName(ClauseKind) << ERange;
19287 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(D,
false);
19288 if (DVar.CKind == OMPC_reduction) {
19289 S.
Diag(ELoc, diag::err_omp_once_referenced)
19290 << getOpenMPClauseName(ClauseKind);
19292 S.
Diag(DVar.RefExpr->getExprLoc(), diag::note_omp_referenced);
19295 if (DVar.CKind != OMPC_unknown) {
19296 S.
Diag(ELoc, diag::err_omp_wrong_dsa)
19297 << getOpenMPClauseName(DVar.CKind)
19298 << getOpenMPClauseName(OMPC_reduction);
19310 DVar = Stack->getImplicitDSA(D,
true);
19311 if (DVar.CKind != OMPC_shared) {
19312 S.
Diag(ELoc, diag::err_omp_required_access)
19313 << getOpenMPClauseName(OMPC_reduction)
19314 << getOpenMPClauseName(OMPC_shared);
19322 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(D,
false);
19323 if (DVar.CKind == OMPC_threadprivate) {
19324 S.
Diag(ELoc, diag::err_omp_wrong_dsa)
19325 << getOpenMPClauseName(DVar.CKind)
19326 << getOpenMPClauseName(OMPC_reduction);
19336 S, ELoc, ERange, Stack->getCurScope(), ReductionIdScopeSpec,
19337 ReductionId,
Type, BasePath, IR == ER ?
nullptr : *IR);
19341 (DeclareReductionRef.
isUnset() ||
19342 isa<UnresolvedLookupExpr>(DeclareReductionRef.
get()))) {
19343 RD.push(RefExpr, DeclareReductionRef.
get());
19346 if (BOK == BO_Comma && DeclareReductionRef.
isUnset()) {
19349 diag::err_omp_unknown_reduction_identifier)
19350 <<
Type << ReductionIdRange;
19362 if (DeclareReductionRef.
isUnset()) {
19363 if ((BOK == BO_GT || BOK == BO_LT) &&
19366 S.
Diag(ELoc, diag::err_omp_clause_not_arithmetic_type_arg)
19367 << getOpenMPClauseName(ClauseKind) << S.
getLangOpts().CPlusPlus;
19368 if (!ASE && !OASE) {
19369 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
19372 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
19377 if ((BOK == BO_OrAssign || BOK == BO_AndAssign || BOK == BO_XorAssign) &&
19379 S.
Diag(ELoc, diag::err_omp_clause_floating_type_arg)
19380 << getOpenMPClauseName(ClauseKind);
19381 if (!ASE && !OASE) {
19382 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
19385 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
19392 Type =
Type.getNonLValueExprType(Context).getUnqualifiedType();
19401 bool ConstantLengthOASE =
false;
19403 bool SingleElement;
19406 Context, OASE, SingleElement, ArraySizes);
19409 if (ConstantLengthOASE && !SingleElement) {
19417 if ((OASE && !ConstantLengthOASE) ||
19422 S.
Diag(ELoc, diag::err_omp_reduction_vla_unsupported) << !!OASE;
19423 S.
Diag(ELoc, diag::note_vla_unsupported);
19426 S.
targetDiag(ELoc, diag::err_omp_reduction_vla_unsupported) << !!OASE;
19427 S.
targetDiag(ELoc, diag::note_vla_unsupported);
19440 }
else if (!ASE && !OASE &&
19448 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
19450 Expr *Init =
nullptr;
19453 if (DeclareReductionRef.
isUsable()) {
19455 auto *DRD = cast<OMPDeclareReductionDecl>(DRDRef->getDecl());
19456 if (DRD->getInitializer()) {
19482 Type = ComplexTy->getElementType();
19491 llvm::APInt InitValue = llvm::APInt::getAllOnes(Size);
19514 (BOK != BO_LT) ? IsSigned ? llvm::APInt::getSignedMinValue(Size)
19515 : llvm::APInt::getMinValue(Size)
19516 : IsSigned ? llvm::APInt::getSignedMaxValue(Size)
19517 : llvm::APInt::getMaxValue(Size);
19559 llvm_unreachable(
"Unexpected reduction operation");
19562 if (Init && DeclareReductionRef.
isUnset()) {
19568 }
else if (!Init) {
19578 S.
Diag(ELoc, diag::err_omp_reduction_id_not_compatible)
19579 <<
Type << ReductionIdRange;
19580 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
19583 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
19589 if (DeclareReductionRef.
isUsable()) {
19590 QualType RedTy = DeclareReductionRef.
get()->getType();
19594 if (!BasePath.empty()) {
19598 Context, PtrRedTy, CK_UncheckedDerivedToBase, LHS.
get(), &BasePath,
19601 Context, PtrRedTy, CK_UncheckedDerivedToBase, RHS.
get(), &BasePath,
19605 QualType Params[] = {PtrRedTy, PtrRedTy};
19620 CombBOK, LHSDRE, RHSDRE);
19627 if (BOK != BO_LT && BOK != BO_GT) {
19630 BO_Assign, LHSDRE, ReductionOp.
get());
19632 auto *ConditionalOp =
new (Context)
19637 BO_Assign, LHSDRE, ConditionalOp);
19650 ExprResult CopyOpRes, TempArrayRes, TempArrayElem;
19651 if (ClauseKind == OMPC_reduction &&
19652 RD.RedModifier == OMPC_REDUCTION_inscan) {
19654 CopyOpRes = S.
BuildBinOp(Stack->getCurScope(), ELoc, BO_Assign, LHSDRE,
19664 if (Stack->getCurrentDirective() == OMPD_simd ||
19702 if (ClauseKind == OMPC_in_reduction) {
19705 const Expr *ParentReductionOp =
nullptr;
19706 Expr *ParentBOKTD =
nullptr, *ParentReductionOpTD =
nullptr;
19707 DSAStackTy::DSAVarData ParentBOKDSA =
19708 Stack->getTopMostTaskgroupReductionData(D, ParentSR, ParentBOK,
19710 DSAStackTy::DSAVarData ParentReductionOpDSA =
19711 Stack->getTopMostTaskgroupReductionData(
19712 D, ParentSR, ParentReductionOp, ParentReductionOpTD);
19713 bool IsParentBOK = ParentBOKDSA.DKind != OMPD_unknown;
19714 bool IsParentReductionOp = ParentReductionOpDSA.DKind != OMPD_unknown;
19715 if ((DeclareReductionRef.
isUnset() && IsParentReductionOp) ||
19716 (DeclareReductionRef.
isUsable() && IsParentBOK) ||
19717 (IsParentBOK && BOK != ParentBOK) || IsParentReductionOp) {
19718 bool EmitError =
true;
19719 if (IsParentReductionOp && DeclareReductionRef.
isUsable()) {
19720 llvm::FoldingSetNodeID RedId, ParentRedId;
19721 ParentReductionOp->
Profile(ParentRedId, Context,
true);
19722 DeclareReductionRef.
get()->Profile(RedId, Context,
19724 EmitError = RedId != ParentRedId;
19728 diag::err_omp_reduction_identifier_mismatch)
19729 << ReductionIdRange << RefExpr->getSourceRange();
19731 diag::note_omp_previous_reduction_identifier)
19733 << (IsParentBOK ? ParentBOKDSA.RefExpr
19734 : ParentReductionOpDSA.RefExpr)
19735 ->getSourceRange();
19739 TaskgroupDescriptor = IsParentBOK ? ParentBOKTD : ParentReductionOpTD;
19746 TransformExprToCaptures RebuildToCapture(S, D);
19748 RebuildToCapture.TransformExpr(RefExpr->IgnoreParens()).get();
19749 Ref = RebuildToCapture.getCapturedExpr();
19751 VarsExpr = Ref =
buildCapture(S, D, SimpleRefExpr,
false);
19754 RD.ExprCaptures.emplace_back(Ref->
getDecl());
19760 S.
BuildBinOp(Stack->getCurScope(), ELoc, BO_Assign, SimpleRefExpr,
19765 Stack->getCurrentDirective() == OMPD_taskgroup) {
19766 S.
Diag(RefExpr->getExprLoc(),
19767 diag::err_omp_reduction_non_addressable_expression)
19768 << RefExpr->getSourceRange();
19771 RD.ExprPostUpdates.emplace_back(
19778 unsigned Modifier = RD.RedModifier;
19781 if (CurrDir == OMPD_taskgroup && ClauseKind == OMPC_task_reduction)
19782 Modifier = OMPC_REDUCTION_task;
19783 Stack->addDSA(D, RefExpr->IgnoreParens(), OMPC_reduction, Ref, Modifier,
19785 if (Modifier == OMPC_REDUCTION_task &&
19786 (CurrDir == OMPD_taskgroup ||
19790 if (DeclareReductionRef.
isUsable())
19791 Stack->addTaskgroupReductionData(D, ReductionIdRange,
19792 DeclareReductionRef.
get());
19794 Stack->addTaskgroupReductionData(D, ReductionIdRange, BOK);
19796 RD.push(VarsExpr, PrivateDRE, LHSDRE, RHSDRE, ReductionOp.
get(),
19797 TaskgroupDescriptor, CopyOpRes.
get(), TempArrayRes.
get(),
19798 TempArrayElem.
get());
19800 return RD.Vars.empty();
19810 Diag(LParenLoc, diag::err_omp_unexpected_clause_value)
19813 << getOpenMPClauseName(OMPC_reduction);
19821 if (Modifier == OMPC_REDUCTION_inscan &&
19822 (
DSAStack->getCurrentDirective() != OMPD_for &&
19823 DSAStack->getCurrentDirective() != OMPD_for_simd &&
19824 DSAStack->getCurrentDirective() != OMPD_simd &&
19825 DSAStack->getCurrentDirective() != OMPD_parallel_for &&
19826 DSAStack->getCurrentDirective() != OMPD_parallel_for_simd)) {
19827 Diag(ModifierLoc, diag::err_omp_wrong_inscan_reduction);
19831 ReductionData RD(VarList.size(), Modifier);
19833 StartLoc, LParenLoc, ColonLoc, EndLoc,
19834 ReductionIdScopeSpec, ReductionId,
19835 UnresolvedReductions, RD))
19839 Context, StartLoc, LParenLoc, ModifierLoc, ColonLoc, EndLoc, Modifier,
19841 RD.Privates, RD.LHSs, RD.RHSs, RD.ReductionOps, RD.InscanCopyOps,
19842 RD.InscanCopyArrayTemps, RD.InscanCopyArrayElems,
19852 ReductionData RD(VarList.size());
19854 StartLoc, LParenLoc, ColonLoc, EndLoc,
19855 ReductionIdScopeSpec, ReductionId,
19856 UnresolvedReductions, RD))
19860 Context, StartLoc, LParenLoc, ColonLoc, EndLoc, RD.Vars,
19862 RD.Privates, RD.LHSs, RD.RHSs, RD.ReductionOps,
19872 ReductionData RD(VarList.size());
19874 StartLoc, LParenLoc, ColonLoc, EndLoc,
19875 ReductionIdScopeSpec, ReductionId,
19876 UnresolvedReductions, RD))
19880 Context, StartLoc, LParenLoc, ColonLoc, EndLoc, RD.Vars,
19882 RD.Privates, RD.LHSs, RD.RHSs, RD.ReductionOps, RD.TaskgroupDescriptors,
19889 if ((!
LangOpts.CPlusPlus && LinKind != OMPC_LINEAR_val) ||
19891 Diag(LinLoc, diag::err_omp_wrong_linear_modifier) <<
LangOpts.CPlusPlus;
19899 bool IsDeclareSimd) {
19900 const auto *VD = dyn_cast_or_null<VarDecl>(D);
19904 if ((LinKind == OMPC_LINEAR_uval || LinKind == OMPC_LINEAR_ref) &&
19906 Diag(ELoc, diag::err_omp_wrong_linear_modifier_non_reference)
19910 Type =
Type.getNonReferenceType();
19917 if (!IsDeclareSimd &&
19922 Type =
Type.getUnqualifiedType().getCanonicalType();
19923 const auto *Ty =
Type.getTypePtrOrNull();
19924 if (!Ty || (LinKind != OMPC_LINEAR_ref && !Ty->isDependentType() &&
19925 !Ty->isIntegralType(
Context) && !Ty->isPointerType())) {
19926 Diag(ELoc, diag::err_omp_linear_expected_int_or_ptr) <<
Type;
19928 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
19931 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
19949 LinKind = OMPC_LINEAR_val;
19950 for (
Expr *RefExpr : VarList) {
19951 assert(RefExpr &&
"NULL expr in OpenMP linear clause.");
19954 Expr *SimpleRefExpr = RefExpr;
19958 Vars.push_back(RefExpr);
19959 Privates.push_back(
nullptr);
19960 Inits.push_back(
nullptr);
19967 auto *VD = dyn_cast<VarDecl>(D);
19973 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
19974 if (DVar.RefExpr) {
19975 Diag(ELoc, diag::err_omp_wrong_dsa) << getOpenMPClauseName(DVar.CKind)
19976 << getOpenMPClauseName(OMPC_linear);
19983 Type =
Type.getNonReferenceType().getUnqualifiedType().getCanonicalType();
19989 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
19998 ExprCaptures.push_back(Ref->
getDecl());
20005 SimpleRefExpr, RefRes.
get());
20008 ExprPostUpdates.push_back(
20013 if (LinKind == OMPC_LINEAR_uval)
20014 InitExpr = VD ? VD->getInit() : SimpleRefExpr;
20016 InitExpr = VD ? SimpleRefExpr : Ref;
20021 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_linear, Ref);
20023 ? RefExpr->IgnoreParens()
20025 Privates.push_back(PrivateRef);
20026 Inits.push_back(InitRef);
20032 Expr *StepExpr = Step;
20033 Expr *CalcStepExpr =
nullptr;
20041 StepExpr = Val.
get();
20049 BuildBinOp(CurScope, StepLoc, BO_Assign, SaveRef.
get(), StepExpr);
20054 if (std::optional<llvm::APSInt> Result =
20056 if (!Result->isNegative() && !Result->isStrictlyPositive())
20057 Diag(StepLoc, diag::warn_omp_linear_step_zero)
20058 << Vars[0] << (Vars.size() > 1);
20062 CalcStepExpr = CalcStep.
get();
20067 ColonLoc, EndLoc, Vars, Privates, Inits,
20068 StepExpr, CalcStepExpr,
20074 Expr *NumIterations,
Sema &SemaRef,
20075 Scope *S, DSAStackTy *Stack) {
20087 Step = cast<BinaryOperator>(CalcStep)->getLHS();
20088 bool HasErrors =
false;
20089 auto CurInit = Clause.
inits().begin();
20090 auto CurPrivate = Clause.
privates().begin();
20095 Expr *SimpleRefExpr = RefExpr;
20096 auto Res =
getPrivateItem(SemaRef, SimpleRefExpr, ELoc, ERange);
20098 if (Res.second || !D) {
20099 Updates.push_back(
nullptr);
20100 Finals.push_back(
nullptr);
20104 auto &&Info = Stack->isLoopControlVariable(D);
20111 diag::err_omp_linear_distribute_var_non_loop_iteration);
20112 Updates.push_back(
nullptr);
20113 Finals.push_back(
nullptr);
20117 Expr *InitExpr = *CurInit;
20120 auto *DE = cast<DeclRefExpr>(SimpleRefExpr);
20122 if (LinKind == OMPC_LINEAR_uval)
20123 CapturedRef = cast<VarDecl>(DE->getDecl())->getInit();
20127 DE->getType().getUnqualifiedType(), DE->getExprLoc(),
20134 SemaRef, S, RefExpr->getExprLoc(), *CurPrivate, InitExpr, IV, Step,
20145 S, RefExpr->getExprLoc(), BO_Assign, CapturedRef,
20148 Final = *CurPrivate;
20152 if (!
Update.isUsable() || !Final.isUsable()) {
20153 Updates.push_back(
nullptr);
20154 Finals.push_back(
nullptr);
20155 UsedExprs.push_back(
nullptr);
20158 Updates.push_back(
Update.get());
20159 Finals.push_back(Final.get());
20161 UsedExprs.push_back(SimpleRefExpr);
20167 UsedExprs.push_back(S);
20169 UsedExprs.append(Clause.
varlist_size() + 1 - UsedExprs.size(),
nullptr);
20180 for (
Expr *RefExpr : VarList) {
20181 assert(RefExpr &&
"NULL expr in OpenMP linear clause.");
20184 Expr *SimpleRefExpr = RefExpr;
20188 Vars.push_back(RefExpr);
20195 auto *VD = dyn_cast<VarDecl>(D);
20203 Diag(ELoc, diag::err_omp_aligned_expected_array_or_ptr)
20205 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
20208 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
20215 if (
const Expr *PrevRef =
DSAStack->addUniqueAligned(D, SimpleRefExpr)) {
20216 Diag(ELoc, diag::err_omp_used_in_clause_twice)
20217 << 0 << getOpenMPClauseName(OMPC_aligned) << ERange;
20218 Diag(PrevRef->getExprLoc(), diag::note_omp_explicit_dsa)
20219 << getOpenMPClauseName(OMPC_aligned);
20227 (VD || !Ref) ? RefExpr->IgnoreParens() : Ref)
20236 if (Alignment !=
nullptr) {
20238 VerifyPositiveIntegerConstantInClause(Alignment, OMPC_aligned);
20241 Alignment = AlignResult.
get();
20247 EndLoc, Vars, Alignment);
20258 for (
Expr *RefExpr : VarList) {
20259 assert(RefExpr &&
"NULL expr in OpenMP copyin clause.");
20260 if (isa<DependentScopeDeclRefExpr>(RefExpr)) {
20262 Vars.push_back(RefExpr);
20263 SrcExprs.push_back(
nullptr);
20264 DstExprs.push_back(
nullptr);
20265 AssignmentOps.push_back(
nullptr);
20274 auto *DE = dyn_cast<DeclRefExpr>(RefExpr);
20275 if (!DE || !isa<VarDecl>(DE->getDecl())) {
20276 Diag(ELoc, diag::err_omp_expected_var_name_member_expr)
20277 << 0 << RefExpr->getSourceRange();
20281 Decl *D = DE->getDecl();
20282 auto *VD = cast<VarDecl>(D);
20287 Vars.push_back(DE);
20288 SrcExprs.push_back(
nullptr);
20289 DstExprs.push_back(
nullptr);
20290 AssignmentOps.push_back(
nullptr);
20296 if (!
DSAStack->isThreadPrivate(VD)) {
20297 Diag(ELoc, diag::err_omp_required_access)
20298 << getOpenMPClauseName(OMPC_copyin)
20299 << getOpenMPDirectiveName(OMPD_threadprivate);
20314 buildVarDecl(*
this, DE->getBeginLoc(), ElemType,
".copyin.dst",
20321 BuildBinOp(
nullptr, DE->getExprLoc(), BO_Assign, PseudoDstExpr,
20330 DSAStack->addDSA(VD, DE, OMPC_copyin);
20331 Vars.push_back(DE);
20332 SrcExprs.push_back(PseudoSrcExpr);
20333 DstExprs.push_back(PseudoDstExpr);
20334 AssignmentOps.push_back(AssignmentOp.
get());
20341 SrcExprs, DstExprs, AssignmentOps);
20352 for (
Expr *RefExpr : VarList) {
20353 assert(RefExpr &&
"NULL expr in OpenMP linear clause.");
20356 Expr *SimpleRefExpr = RefExpr;
20360 Vars.push_back(RefExpr);
20361 SrcExprs.push_back(
nullptr);
20362 DstExprs.push_back(
nullptr);
20363 AssignmentOps.push_back(
nullptr);
20370 auto *VD = dyn_cast<VarDecl>(D);
20375 if (!VD || !
DSAStack->isThreadPrivate(VD)) {
20376 DSAStackTy::DSAVarData DVar =
20378 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_copyprivate &&
20380 Diag(ELoc, diag::err_omp_wrong_dsa)
20381 << getOpenMPClauseName(DVar.CKind)
20382 << getOpenMPClauseName(OMPC_copyprivate);
20390 if (DVar.CKind == OMPC_unknown) {
20391 DVar =
DSAStack->getImplicitDSA(D,
false);
20392 if (DVar.CKind == OMPC_shared) {
20393 Diag(ELoc, diag::err_omp_required_access)
20394 << getOpenMPClauseName(OMPC_copyprivate)
20395 <<
"threadprivate or private in the enclosing context";
20404 Diag(ELoc, diag::err_omp_variably_modified_type_not_supported)
20405 << getOpenMPClauseName(OMPC_copyprivate) <<
Type
20406 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
20407 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
Context) ==
20410 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
20430 DSAStack->getCurScope(), ELoc, BO_Assign, PseudoDstExpr, PseudoSrcExpr);
20442 VD ? RefExpr->IgnoreParens()
20444 SrcExprs.push_back(PseudoSrcExpr);
20445 DstExprs.push_back(PseudoDstExpr);
20446 AssignmentOps.push_back(AssignmentOp.
get());
20453 Vars, SrcExprs, DstExprs, AssignmentOps);
20460 if (VarList.empty())
20468 bool Diagnose =
true) {
20469 QualType OMPDependT = Stack->getOMPDependT();
20470 if (!OMPDependT.
isNull())
20476 S.
Diag(Loc, diag::err_omp_implied_type_not_found) <<
"omp_depend_t";
20479 Stack->setOMPDependT(PT.
get());
20496 (OMPDependTFound &&
20499 Diag(Depobj->
getExprLoc(), diag::err_omp_expected_omp_depend_t_lvalue)
20504 Diag(Depobj->
getExprLoc(), diag::err_omp_expected_omp_depend_t_lvalue)
20518 if (
DSAStack->getCurrentDirective() == OMPD_ordered &&
20519 DepKind != OMPC_DEPEND_source && DepKind != OMPC_DEPEND_sink) {
20520 Diag(DepLoc, diag::err_omp_unexpected_clause_value)
20521 <<
"'source' or 'sink'" << getOpenMPClauseName(OMPC_depend);
20524 if (
DSAStack->getCurrentDirective() == OMPD_taskwait &&
20525 DepKind == OMPC_DEPEND_mutexinoutset) {
20526 Diag(DepLoc, diag::err_omp_taskwait_depend_mutexinoutset_not_allowed);
20529 if ((
DSAStack->getCurrentDirective() != OMPD_ordered ||
20530 DSAStack->getCurrentDirective() == OMPD_depobj) &&
20532 DepKind == OMPC_DEPEND_sink ||
20534 DSAStack->getCurrentDirective() == OMPD_depobj) &&
20535 DepKind == OMPC_DEPEND_depobj))) {
20537 OMPC_DEPEND_outallmemory,
20538 OMPC_DEPEND_inoutallmemory};
20539 if (
LangOpts.OpenMP < 50 ||
DSAStack->getCurrentDirective() == OMPD_depobj)
20540 Except.push_back(OMPC_DEPEND_depobj);
20542 Except.push_back(OMPC_DEPEND_inoutset);
20544 ?
"depend modifier(iterator) or "
20546 Diag(DepLoc, diag::err_omp_unexpected_clause_value)
20550 << getOpenMPClauseName(OMPC_depend);
20554 (DepKind == OMPC_DEPEND_source || DepKind == OMPC_DEPEND_sink)) {
20556 diag::err_omp_depend_sink_source_with_modifier);
20561 Diag(DepModifier->
getExprLoc(), diag::err_omp_depend_modifier_not_iterator);
20567 if (DepKind == OMPC_DEPEND_sink || DepKind == OMPC_DEPEND_source) {
20568 if (
const Expr *OrderedCountExpr =
20569 DSAStack->getParentOrderedRegionParam().first) {
20570 TotalDepCount = OrderedCountExpr->EvaluateKnownConstInt(
Context);
20571 TotalDepCount.setIsUnsigned(
true);
20574 for (
Expr *RefExpr : VarList) {
20575 assert(RefExpr &&
"NULL expr in OpenMP shared clause.");
20576 if (isa<DependentScopeDeclRefExpr>(RefExpr)) {
20578 Vars.push_back(RefExpr);
20584 if (DepKind == OMPC_DEPEND_sink) {
20585 if (
DSAStack->getParentOrderedRegionParam().first &&
20586 DepCounter >= TotalDepCount) {
20587 Diag(ELoc, diag::err_omp_depend_sink_unexpected_expr);
20601 Vars.push_back(RefExpr);
20607 Expr *LHS = SimpleExpr;
20608 Expr *RHS =
nullptr;
20609 if (
auto *BO = dyn_cast<BinaryOperator>(SimpleExpr)) {
20611 OOLoc = BO->getOperatorLoc();
20614 }
else if (
auto *OCE = dyn_cast<CXXOperatorCallExpr>(SimpleExpr)) {
20615 OOK = OCE->getOperator();
20616 OOLoc = OCE->getOperatorLoc();
20619 }
else if (
auto *MCE = dyn_cast<CXXMemberCallExpr>(SimpleExpr)) {
20620 OOK = MCE->getMethodDecl()
20623 .getCXXOverloadedOperator();
20624 OOLoc = MCE->getCallee()->getExprLoc();
20633 Vars.push_back(RefExpr);
20639 if (OOK != OO_Plus && OOK != OO_Minus && (RHS || OOK !=
OO_None)) {
20640 Diag(OOLoc, diag::err_omp_depend_sink_expected_plus_minus);
20644 ExprResult RHSRes = VerifyPositiveIntegerConstantInClause(
20645 RHS, OMPC_depend,
false);
20650 DSAStack->getParentOrderedRegionParam().first &&
20651 DepCounter !=
DSAStack->isParentLoopControlVariable(D).first) {
20653 DSAStack->getParentLoopControlVariable(DepCounter.getZExtValue());
20655 Diag(ELoc, diag::err_omp_depend_sink_expected_loop_iteration)
20658 Diag(ELoc, diag::err_omp_depend_sink_expected_loop_iteration) << 0;
20661 OpsOffs.emplace_back(RHS, OOK);
20663 bool OMPDependTFound =
LangOpts.OpenMP >= 50;
20664 if (OMPDependTFound)
20666 DepKind == OMPC_DEPEND_depobj);
20667 if (DepKind == OMPC_DEPEND_depobj) {
20671 if (!RefExpr->isValueDependent() && !RefExpr->isTypeDependent() &&
20672 !RefExpr->isInstantiationDependent() &&
20673 !RefExpr->containsUnexpandedParameterPack() &&
20674 (OMPDependTFound &&
20676 RefExpr->getType()))) {
20677 Diag(ELoc, diag::err_omp_expected_omp_depend_t_lvalue)
20678 << 0 << RefExpr->getType() << RefExpr->getSourceRange();
20681 if (!RefExpr->isLValue()) {
20682 Diag(ELoc, diag::err_omp_expected_omp_depend_t_lvalue)
20683 << 1 << RefExpr->getType() << RefExpr->getSourceRange();
20690 QualType ExprTy = RefExpr->getType().getNonReferenceType();
20691 const auto *OASE = dyn_cast<OMPArraySectionExpr>(SimpleExpr);
20696 ExprTy = ATy->getElementType();
20700 const Expr *Length = OASE->getLength();
20704 Result.Val.getInt().isZero()) {
20706 diag::err_omp_depend_zero_length_array_section_not_allowed)
20716 if (!RefExpr->isValueDependent() && !RefExpr->isTypeDependent() &&
20717 !RefExpr->isInstantiationDependent() &&
20718 !RefExpr->containsUnexpandedParameterPack() &&
20719 (!RefExpr->IgnoreParenImpCasts()->isLValue() ||
20720 (OMPDependTFound &&
20722 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
20723 << (
LangOpts.OpenMP >= 50 ? 1 : 0)
20724 << (
LangOpts.OpenMP >= 50 ? 1 : 0) << RefExpr->getSourceRange();
20728 auto *ASE = dyn_cast<ArraySubscriptExpr>(SimpleExpr);
20729 if (ASE && !ASE->getBase()->isTypeDependent() &&
20730 !ASE->getBase()->getType().getNonReferenceType()->isPointerType() &&
20731 !ASE->getBase()->getType().getNonReferenceType()->isArrayType()) {
20732 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
20733 << (
LangOpts.OpenMP >= 50 ? 1 : 0)
20734 << (
LangOpts.OpenMP >= 50 ? 1 : 0) << RefExpr->getSourceRange();
20742 RefExpr->IgnoreParenImpCasts());
20744 if (!Res.
isUsable() && !isa<OMPArraySectionExpr>(SimpleExpr) &&
20745 !isa<OMPArrayShapingExpr>(SimpleExpr)) {
20746 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
20747 << (
LangOpts.OpenMP >= 50 ? 1 : 0)
20748 << (
LangOpts.OpenMP >= 50 ? 1 : 0) << RefExpr->getSourceRange();
20753 Vars.push_back(RefExpr->IgnoreParenImpCasts());
20757 TotalDepCount > VarList.size() &&
20758 DSAStack->getParentOrderedRegionParam().first &&
20759 DSAStack->getParentLoopControlVariable(VarList.size() + 1)) {
20760 Diag(EndLoc, diag::err_omp_depend_sink_expected_loop_iteration)
20761 << 1 <<
DSAStack->getParentLoopControlVariable(VarList.size() + 1);
20763 if (DepKind != OMPC_DEPEND_source && DepKind != OMPC_DEPEND_sink &&
20764 DepKind != OMPC_DEPEND_outallmemory &&
20765 DepKind != OMPC_DEPEND_inoutallmemory && Vars.empty())
20769 Context, StartLoc, LParenLoc, EndLoc,
20770 {DepKind, DepLoc,
Data.ColonLoc,
Data.OmpAllMemoryLoc}, DepModifier, Vars,
20771 TotalDepCount.getZExtValue());
20772 if ((DepKind == OMPC_DEPEND_sink || DepKind == OMPC_DEPEND_source) &&
20773 DSAStack->isParentOrderedRegion())
20774 DSAStack->addDoacrossDependClause(C, OpsOffs);
20784 "Unexpected device modifier in OpenMP < 50.");
20786 bool ErrorFound =
false;
20790 Diag(ModifierLoc, diag::err_omp_unexpected_clause_value)
20791 << Values << getOpenMPClauseName(OMPC_device);
20795 Expr *ValExpr = Device;
20796 Stmt *HelperValStmt =
nullptr;
20809 if (Modifier == OMPC_DEVICE_ancestor) {
20813 diag::err_omp_device_ancestor_without_requires_reverse_offload);
20823 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
20824 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
20829 OMPDeviceClause(Modifier, ValExpr, HelperValStmt, CaptureRegion, StartLoc,
20830 LParenLoc, ModifierLoc, EndLoc);
20835 bool FullCheck =
true) {
20840 SemaRef.
Diag(SL, diag::warn_omp_non_trivial_type_mapped) << QTy << SR;
20850 const auto *OASE = dyn_cast<OMPArraySectionExpr>(E);
20855 if (isa<ArraySubscriptExpr>(E) ||
20856 (OASE && OASE->getColonLocFirst().isInvalid())) {
20857 if (
const auto *ATy = dyn_cast<ConstantArrayType>(BaseQTy.
getTypePtr()))
20858 return ATy->getSize().getSExtValue() != 1;
20863 assert(OASE &&
"Expecting array section if not an array subscript.");
20864 const Expr *LowerBound = OASE->getLowerBound();
20865 const Expr *Length = OASE->getLength();
20875 if (ConstLowerBound.getSExtValue())
20890 const auto *CATy = dyn_cast<ConstantArrayType>(BaseQTy.
getTypePtr());
20899 return CATy->getSize().getSExtValue() != ConstLength.getSExtValue();
20908 const auto *OASE = dyn_cast<OMPArraySectionExpr>(E);
20912 if (isa<ArraySubscriptExpr>(E) ||
20913 (OASE && OASE->getColonLocFirst().isInvalid()))
20916 assert(OASE &&
"Expecting array section if not an array subscript.");
20917 const Expr *Length = OASE->getLength();
20923 if (
const auto *ATy = dyn_cast<ConstantArrayType>(BaseQTy.
getTypePtr()))
20924 return ATy->getSize().getSExtValue() != 1;
20935 return ConstLength.getSExtValue() != 1;
20974 class MapBaseChecker final :
public StmtVisitor<MapBaseChecker, bool> {
20979 bool IsNonContiguous =
false;
20980 bool NoDiagnose =
false;
20981 const Expr *RelevantExpr =
nullptr;
20982 bool AllowUnitySizeArraySection =
true;
20983 bool AllowWholeSizeArraySection =
true;
20984 bool AllowAnotherPtr =
true;
20988 void emitErrorMsg() {
20990 if (SemaRef.getLangOpts().OpenMP < 50) {
20992 diag::err_omp_expected_named_var_member_or_array_expression)
20995 SemaRef.Diag(ELoc, diag::err_omp_non_lvalue_in_map_or_motion_clauses)
20996 << getOpenMPClauseName(CKind) << ERange;
21002 if (!isa<VarDecl>(DRE->
getDecl())) {
21006 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
21007 RelevantExpr = DRE;
21009 Components.emplace_back(DRE, DRE->
getDecl(), IsNonContiguous);
21017 if (isa<CXXThisExpr>(BaseE)) {
21018 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
21027 SemaRef.Diag(ELoc, diag::err_omp_expected_access_to_data_field)
21043 SemaRef.Diag(ELoc, diag::err_omp_bit_fields_forbidden_in_clause)
21063 SemaRef.Diag(ELoc, diag::err_omp_union_type_not_allowed)
21067 return RelevantExpr || Visit(E);
21077 AllowUnitySizeArraySection =
false;
21078 AllowWholeSizeArraySection =
false;
21081 Components.emplace_back(ME, FD, IsNonContiguous);
21082 return RelevantExpr || Visit(E);
21090 SemaRef.Diag(ELoc, diag::err_omp_expected_base_var_name)
21094 return RelevantExpr || Visit(E);
21101 AllowWholeSizeArraySection =
false;
21107 !Result.Val.getInt().isZero()) {
21109 diag::err_omp_invalid_map_this_expr);
21111 diag::note_omp_invalid_subscript_on_this_ptr_map);
21113 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
21118 Components.emplace_back(AE,
nullptr, IsNonContiguous);
21120 return RelevantExpr || Visit(E);
21126 assert(!(SemaRef.getLangOpts().OpenMP < 50 && NoDiagnose) &&
21127 "Array sections cannot be implicitly mapped.");
21141 SemaRef.Diag(ELoc, diag::err_omp_expected_base_var_name)
21151 if (AllowWholeSizeArraySection) {
21158 if (NotWhole || IsPointer)
21159 AllowWholeSizeArraySection =
false;
21160 }
else if (DKind == OMPD_target_update &&
21161 SemaRef.getLangOpts().OpenMP >= 50) {
21162 if (IsPointer && !AllowAnotherPtr)
21163 SemaRef.Diag(ELoc, diag::err_omp_section_length_undefined)
21166 IsNonContiguous =
true;
21167 }
else if (AllowUnitySizeArraySection && NotUnity) {
21173 diag::err_array_section_does_not_specify_contiguous_storage)
21179 AllowAnotherPtr =
false;
21181 if (
const auto *TE = dyn_cast<CXXThisExpr>(E)) {
21188 diag::err_omp_invalid_map_this_expr);
21190 diag::note_omp_invalid_length_on_this_ptr_mapping);
21194 SemaRef.getASTContext()) &&
21197 diag::err_omp_invalid_map_this_expr);
21199 diag::note_omp_invalid_lower_bound_on_this_ptr_mapping);
21201 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
21206 Components.emplace_back(OASE,
nullptr,
false);
21207 return RelevantExpr || Visit(E);
21213 Components.emplace_back(E,
nullptr, IsNonContiguous);
21215 return Visit(
Base->IgnoreParenImpCasts());
21219 if (SemaRef.getLangOpts().OpenMP < 50 || !UO->
isLValue() ||
21224 if (!RelevantExpr) {
21226 Components.emplace_back(UO,
nullptr,
false);
21242 Components.emplace_back(BO,
nullptr,
false);
21245 "Either LHS or RHS have base decl inside");
21247 return RelevantExpr || Visit(
LE);
21248 return RelevantExpr || Visit(RE);
21251 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
21252 RelevantExpr = CTE;
21253 Components.emplace_back(CTE,
nullptr, IsNonContiguous);
21257 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
21258 Components.emplace_back(COCE,
nullptr, IsNonContiguous);
21267 return Visit(Source);
21269 bool VisitStmt(
Stmt *) {
21273 const Expr *getFoundBase()
const {
return RelevantExpr; }
21274 explicit MapBaseChecker(
21278 : SemaRef(SemaRef), CKind(CKind), DKind(DKind), Components(Components),
21279 NoDiagnose(NoDiagnose), ELoc(ELoc), ERange(ERange) {}
21293 MapBaseChecker Checker(SemaRef, CKind, DKind, CurComponents, NoDiagnose, ELoc,
21297 if (SemaRef.
getLangOpts().OpenMP >= 50 && !CurComponents.empty() &&
21298 (CKind == OMPC_to || CKind == OMPC_from)) {
21299 auto CI = CurComponents.rbegin();
21300 auto CE = CurComponents.rend();
21301 for (; CI != CE; ++CI) {
21303 dyn_cast<OMPArraySectionExpr>(CI->getAssociatedExpression());
21308 SemaRef.
Diag(ELoc, diag::err_array_section_does_not_specify_length)
21312 return Checker.getFoundBase();
21321 bool CurrentRegionOnly,
21332 assert(!CurComponents.empty() &&
"Map clause expression with no components!");
21333 assert(CurComponents.back().getAssociatedDeclaration() == VD &&
21334 "Map clause expression with unexpected base!");
21337 bool IsEnclosedByDataEnvironmentExpr =
false;
21338 const Expr *EnclosingExpr =
nullptr;
21340 bool FoundError = DSAS->checkMappableExprComponentListsForDecl(
21341 VD, CurrentRegionOnly,
21342 [&IsEnclosedByDataEnvironmentExpr, &SemaRef, VD, CurrentRegionOnly, ELoc,
21343 ERange, CKind, &EnclosingExpr,
21349 assert(!StackComponents.empty() &&
21350 "Map clause expression with no components!");
21351 assert(StackComponents.back().getAssociatedDeclaration() == VD &&
21352 "Map clause expression with unexpected base!");
21356 const Expr *RE = StackComponents.front().getAssociatedExpression();
21362 auto CI = CurComponents.rbegin();
21363 auto CE = CurComponents.rend();
21364 auto SI = StackComponents.rbegin();
21365 auto SE = StackComponents.rend();
21366 for (; CI != CE && SI != SE; ++CI, ++SI) {
21371 if (CurrentRegionOnly &&
21372 (isa<ArraySubscriptExpr>(CI->getAssociatedExpression()) ||
21373 isa<OMPArraySectionExpr>(CI->getAssociatedExpression()) ||
21374 isa<OMPArrayShapingExpr>(CI->getAssociatedExpression())) &&
21375 (isa<ArraySubscriptExpr>(SI->getAssociatedExpression()) ||
21376 isa<OMPArraySectionExpr>(SI->getAssociatedExpression()) ||
21377 isa<OMPArrayShapingExpr>(SI->getAssociatedExpression()))) {
21378 SemaRef.Diag(CI->getAssociatedExpression()->getExprLoc(),
21379 diag::err_omp_multiple_array_items_in_map_clause)
21380 << CI->getAssociatedExpression()->getSourceRange();
21381 SemaRef.Diag(SI->getAssociatedExpression()->getExprLoc(),
21382 diag::note_used_here)
21383 << SI->getAssociatedExpression()->getSourceRange();
21388 if (CI->getAssociatedExpression()->getStmtClass() !=
21389 SI->getAssociatedExpression()->getStmtClass())
21393 if (CI->getAssociatedDeclaration() != SI->getAssociatedDeclaration())
21399 for (; SI != SE; ++SI) {
21401 if (
const auto *ASE =
21402 dyn_cast<ArraySubscriptExpr>(SI->getAssociatedExpression())) {
21403 Type = ASE->getBase()->IgnoreParenImpCasts()->getType();
21404 }
else if (
const auto *OASE = dyn_cast<OMPArraySectionExpr>(
21405 SI->getAssociatedExpression())) {
21406 const Expr *E = OASE->getBase()->IgnoreParenImpCasts();
21408 OMPArraySectionExpr::getBaseOriginalType(E).getCanonicalType();
21409 }
else if (
const auto *OASE = dyn_cast<OMPArrayShapingExpr>(
21410 SI->getAssociatedExpression())) {
21411 Type = OASE->getBase()->getType()->getPointeeType();
21415 SemaRef, SI->getAssociatedExpression(),
Type))
21425 if (CI == CE && SI == SE) {
21426 if (CurrentRegionOnly) {
21427 if (CKind == OMPC_map) {
21428 SemaRef.
Diag(ELoc, diag::err_omp_map_shared_storage) << ERange;
21430 assert(CKind == OMPC_to || CKind == OMPC_from);
21431 SemaRef.
Diag(ELoc, diag::err_omp_once_referenced_in_target_update)
21440 IsEnclosedByDataEnvironmentExpr =
true;
21445 std::prev(CI)->getAssociatedDeclaration()->getType();
21447 std::prev(CI)->getAssociatedExpression()->getExprLoc();
21466 if (CI == CE || SI == SE) {
21469 diag::err_omp_pointer_mapped_along_with_derived_section)
21475 if (CI->getAssociatedExpression()->getStmtClass() !=
21476 SI->getAssociatedExpression()->getStmtClass() ||
21477 CI->getAssociatedDeclaration()->getCanonicalDecl() ==
21478 SI->getAssociatedDeclaration()->getCanonicalDecl()) {
21479 assert(CI != CE && SI != SE);
21480 SemaRef.
Diag(DerivedLoc, diag::err_omp_same_pointer_dereferenced)
21493 if (CurrentRegionOnly && (CI == CE || SI == SE)) {
21494 if (CKind == OMPC_map) {
21495 if (CI != CE || SI != SE) {
21499 CI != CE ? CurComponents.begin() : StackComponents.begin();
21500 auto End = CI != CE ? CurComponents.end() : StackComponents.end();
21502 while (It !=
End && !It->getAssociatedDeclaration())
21504 assert(It !=
End &&
21505 "Expected at least one component with the declaration.");
21506 if (It !=
Begin && It->getAssociatedDeclaration()
21508 .getCanonicalType()
21509 ->isAnyPointerType()) {
21510 IsEnclosedByDataEnvironmentExpr =
false;
21511 EnclosingExpr =
nullptr;
21515 SemaRef.
Diag(ELoc, diag::err_omp_map_shared_storage) << ERange;
21517 assert(CKind == OMPC_to || CKind == OMPC_from);
21518 SemaRef.
Diag(ELoc, diag::err_omp_once_referenced_in_target_update)
21528 if (!CurrentRegionOnly && SI != SE)
21529 EnclosingExpr = RE;
21533 IsEnclosedByDataEnvironmentExpr |=
21534 (!CurrentRegionOnly && CI != CE && SI == SE);
21539 if (CurrentRegionOnly)
21553 if (EnclosingExpr && !IsEnclosedByDataEnvironmentExpr) {
21555 diag::err_omp_original_storage_is_shared_and_does_not_contain)
21571 Expr *UnresolvedMapper) {
21586 while (S && !S->isDeclScope(D))
21587 S = S->getParent();
21589 S = S->getParent();
21590 Lookups.emplace_back();
21591 Lookups.back().append(Lookup.
begin(), Lookup.
end());
21594 }
else if (
auto *ULE = cast_or_null<UnresolvedLookupExpr>(UnresolvedMapper)) {
21598 auto *DMD = cast<OMPDeclareMapperDecl>(D);
21599 assert(DMD &&
"Expect valid OMPDeclareMapperDecl during instantiation.");
21600 Lookups.back().addDecl(DMD);
21608 filterLookupForUDReductionAndMapper<bool>(Lookups, [](
ValueDecl *D) {
21609 return !D->isInvalidDecl() &&
21610 (D->getType()->isDependentType() ||
21611 D->getType()->isInstantiationDependentType() ||
21612 D->getType()->containsUnexpandedParameterPack());
21618 URS.
append(Set.begin(), Set.end());
21623 false,
true, URS.
begin(), URS.
end());
21630 SemaRef.
Diag(Loc, diag::err_omp_mapper_wrong_type);
21637 if (
auto *VD = filterLookupForUDReductionAndMapper<ValueDecl *>(
21647 if (
auto *VD = filterLookupForUDReductionAndMapper<ValueDecl *>(
21670 SemaRef.
Diag(Loc, diag::err_omp_invalid_mapper)
21680 struct MappableVarListInfo {
21695 VarComponents.reserve(VarList.size());
21696 VarBaseDeclarations.reserve(VarList.size());
21714 bool IsMapTypeImplicit =
false,
bool NoDiagnose =
false) {
21716 assert((CKind == OMPC_map || CKind == OMPC_to || CKind == OMPC_from) &&
21717 "Unexpected clause kind with mappable expressions!");
21725 MapperId.
setName(DeclNames.getIdentifier(
21727 MapperId.
setLoc(StartLoc);
21731 auto UMIt = UnresolvedMappers.begin(), UMEnd = UnresolvedMappers.end();
21732 bool UpdateUMIt =
false;
21733 Expr *UnresolvedMapper =
nullptr;
21735 bool HasHoldModifier =
21736 llvm::is_contained(Modifiers, OMPC_MAP_MODIFIER_ompx_hold);
21744 for (
Expr *RE : MVLI.VarList) {
21745 assert(RE &&
"Null expr in omp to/from/map clause");
21749 if (UpdateUMIt && UMIt != UMEnd) {
21753 "Expect the size of UnresolvedMappers to match with that of VarList");
21757 UnresolvedMapper = *UMIt;
21766 SemaRef, DSAS->getCurScope(), MapperIdScopeSpec, MapperId,
21770 MVLI.UDMapperList.push_back(ER.
get());
21773 MVLI.ProcessedVarList.push_back(RE);
21782 ELoc, diag::err_omp_expected_named_var_member_or_array_expression)
21785 SemaRef.
Diag(ELoc, diag::err_omp_non_lvalue_in_map_or_motion_clauses)
21798 DSAS->getCurrentDirective(), NoDiagnose);
21802 assert(!CurComponents.empty() &&
21803 "Invalid mappable expression information.");
21805 if (
const auto *TE = dyn_cast<CXXThisExpr>(BE)) {
21807 DSAS->addMappedClassesQualTypes(TE->getType());
21810 SemaRef, DSAS->getCurScope(), MapperIdScopeSpec, MapperId,
21814 MVLI.UDMapperList.push_back(ER.
get());
21816 MVLI.ProcessedVarList.push_back(RE);
21817 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
21818 MVLI.VarComponents.back().append(CurComponents.begin(),
21819 CurComponents.end());
21820 MVLI.VarBaseDeclarations.push_back(
nullptr);
21827 CurDeclaration = CurComponents.back().getAssociatedDeclaration();
21828 assert(CurDeclaration &&
"Null decl on map clause.");
21831 "Expecting components to have associated only canonical declarations.");
21833 auto *VD = dyn_cast<VarDecl>(CurDeclaration);
21834 const auto *FD = dyn_cast<FieldDecl>(CurDeclaration);
21836 assert((VD || FD) &&
"Only variables or fields are expected here!");
21843 if (VD && DSAS->isThreadPrivate(VD)) {
21846 DSAStackTy::DSAVarData DVar = DSAS->getTopDSA(VD,
false);
21847 SemaRef.
Diag(ELoc, diag::err_omp_threadprivate_in_clause)
21848 << getOpenMPClauseName(CKind);
21862 true, CurComponents, CKind))
21864 if (CKind == OMPC_map &&
21867 false, CurComponents, CKind))
21874 auto I = llvm::find_if(
21879 assert(I != CurComponents.end() &&
"Null decl on map clause.");
21882 auto *ASE = dyn_cast<ArraySubscriptExpr>(VE->
IgnoreParens());
21883 auto *OASE = dyn_cast<OMPArraySectionExpr>(VE->
IgnoreParens());
21884 auto *OAShE = dyn_cast<OMPArrayShapingExpr>(VE->
IgnoreParens());
21886 Type = ASE->getType().getNonReferenceType();
21891 Type = ATy->getElementType();
21894 Type =
Type.getNonReferenceType();
21895 }
else if (OAShE) {
21909 if (CKind == OMPC_map) {
21916 if (DKind == OMPD_target_enter_data &&
21917 !(MapType == OMPC_MAP_to || MapType == OMPC_MAP_alloc ||
21919 SemaRef.
Diag(StartLoc, diag::err_omp_invalid_map_type_for_directive)
21920 << (IsMapTypeImplicit ? 1 : 0)
21922 << getOpenMPDirectiveName(DKind);
21931 if (DKind == OMPD_target_exit_data &&
21932 !(MapType == OMPC_MAP_from || MapType == OMPC_MAP_release ||
21933 MapType == OMPC_MAP_delete || SemaRef.
getLangOpts().OpenMP >= 52)) {
21934 SemaRef.
Diag(StartLoc, diag::err_omp_invalid_map_type_for_directive)
21935 << (IsMapTypeImplicit ? 1 : 0)
21937 << getOpenMPDirectiveName(DKind);
21946 if ((DKind == OMPD_target_enter_data || DKind == OMPD_target_exit_data) &&
21948 SemaRef.
Diag(StartLoc,
21949 diag::err_omp_invalid_map_type_modifier_for_directive)
21951 OMPC_MAP_MODIFIER_ompx_hold)
21952 << getOpenMPDirectiveName(DKind);
21960 if ((DKind == OMPD_target_data ||
21962 !(MapType == OMPC_MAP_to || MapType == OMPC_MAP_from ||
21963 MapType == OMPC_MAP_tofrom || MapType == OMPC_MAP_alloc)) {
21964 SemaRef.
Diag(StartLoc, diag::err_omp_invalid_map_type_for_directive)
21965 << (IsMapTypeImplicit ? 1 : 0)
21967 << getOpenMPDirectiveName(DKind);
21979 if (VD && ((SemaRef.
LangOpts.OpenMP <= 45 &&
21981 DKind == OMPD_target)) {
21982 DSAStackTy::DSAVarData DVar = DSAS->getTopDSA(VD,
false);
21984 SemaRef.
Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
21985 << getOpenMPClauseName(DVar.CKind)
21986 << getOpenMPClauseName(OMPC_map)
21987 << getOpenMPDirectiveName(DSAS->getCurrentDirective());
21996 SemaRef, DSAS->getCurScope(), MapperIdScopeSpec, MapperId,
21997 Type.getCanonicalType(), UnresolvedMapper);
22000 MVLI.UDMapperList.push_back(ER.
get());
22003 MVLI.ProcessedVarList.push_back(RE);
22007 DSAS->addMappableExpressionComponents(CurDeclaration, CurComponents,
22013 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
22014 MVLI.VarComponents.back().append(CurComponents.begin(),
22015 CurComponents.end());
22016 MVLI.VarBaseDeclarations.push_back(isa<MemberExpr>(BE) ?
nullptr
22036 BuiltinType::OMPIterator))
22038 diag::err_omp_map_modifier_not_iterator);
22041 unsigned Count = 0;
22042 for (
unsigned I = 0, E = MapTypeModifiers.size(); I < E; ++I) {
22044 llvm::is_contained(Modifiers, MapTypeModifiers[I])) {
22045 Diag(MapTypeModifiersLoc[I], diag::err_omp_duplicate_map_type_modifier);
22049 "Modifiers exceed the allowed number of map type modifiers");
22050 Modifiers[Count] = MapTypeModifiers[I];
22051 ModifiersLoc[Count] = MapTypeModifiersLoc[I];
22055 MappableVarListInfo MVLI(VarList);
22057 MapperIdScopeSpec, MapperId, UnresolvedMappers,
22058 MapType, Modifiers, IsMapTypeImplicit,
22064 Context, Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
22065 MVLI.VarComponents, MVLI.UDMapperList, IteratorModifier, Modifiers,
22067 MapType, IsMapTypeImplicit, MapLoc);
22075 if (ReductionType.
isNull())
22083 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 0;
22088 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 1;
22092 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 2;
22096 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 3;
22099 return ReductionType;
22104 ArrayRef<std::pair<QualType, SourceLocation>> ReductionTypes,
22107 Decls.reserve(ReductionTypes.size());
22115 llvm::DenseMap<QualType, SourceLocation> PreviousRedeclTypes;
22117 bool InCompoundScope =
true;
22118 if (S !=
nullptr) {
22127 llvm::DenseMap<OMPDeclareReductionDecl *, bool> UsedAsPrevious;
22129 while (
Filter.hasNext()) {
22130 auto *PrevDecl = cast<OMPDeclareReductionDecl>(
Filter.next());
22131 if (InCompoundScope) {
22132 auto I = UsedAsPrevious.find(PrevDecl);
22133 if (I == UsedAsPrevious.end())
22134 UsedAsPrevious[PrevDecl] =
false;
22136 UsedAsPrevious[D] =
true;
22138 PreviousRedeclTypes[PrevDecl->getType().getCanonicalType()] =
22139 PrevDecl->getLocation();
22142 if (InCompoundScope) {
22143 for (
const auto &PrevData : UsedAsPrevious) {
22144 if (!PrevData.second) {
22145 PrevDRD = PrevData.first;
22150 }
else if (PrevDeclInScope !=
nullptr) {
22151 auto *PrevDRDInScope = PrevDRD =
22152 cast<OMPDeclareReductionDecl>(PrevDeclInScope);
22154 PreviousRedeclTypes[PrevDRDInScope->getType().getCanonicalType()] =
22155 PrevDRDInScope->getLocation();
22156 PrevDRDInScope = PrevDRDInScope->getPrevDeclInScope();
22157 }
while (PrevDRDInScope !=
nullptr);
22159 for (
const auto &TyData : ReductionTypes) {
22160 const auto I = PreviousRedeclTypes.find(TyData.first.getCanonicalType());
22161 bool Invalid =
false;
22162 if (I != PreviousRedeclTypes.end()) {
22163 Diag(TyData.second, diag::err_omp_declare_reduction_redefinition)
22165 Diag(I->second, diag::note_previous_definition);
22168 PreviousRedeclTypes[TyData.first.getCanonicalType()] = TyData.second;
22170 Name, TyData.first, PrevDRD);
22172 DRD->setAccess(AS);
22173 Decls.push_back(DRD);
22175 DRD->setInvalidDecl();
22185 auto *DRD = cast<OMPDeclareReductionDecl>(D);
22200 QualType ReductionType = DRD->getType();
22217 if (S !=
nullptr) {
22221 DRD->addDecl(OmpInParm);
22222 DRD->addDecl(OmpOutParm);
22228 DRD->setCombinerData(InE, OutE);
22232 auto *DRD = cast<OMPDeclareReductionDecl>(D);
22239 if (Combiner !=
nullptr)
22240 DRD->setCombiner(Combiner);
22242 DRD->setInvalidDecl();
22246 auto *DRD = cast<OMPDeclareReductionDecl>(D);
22260 QualType ReductionType = DRD->getType();
22277 if (S !=
nullptr) {
22281 DRD->addDecl(OmpPrivParm);
22282 DRD->addDecl(OmpOrigParm);
22288 DRD->setInitializerData(OrigE, PrivE);
22289 return OmpPrivParm;
22294 auto *DRD = cast<OMPDeclareReductionDecl>(D);
22303 }
else if (OmpPrivParm->
hasInit()) {
22304 DRD->setInitializer(OmpPrivParm->
getInit(),
22309 DRD->setInvalidDecl();
22315 for (
Decl *D : DeclReductions.
get()) {
22321 D->setInvalidDecl();
22324 return DeclReductions;
22343 assert(
ParsedType.isUsable() &&
"Expect usable parsed mapper type");
22346 assert(!MapperType.
isNull() &&
"Expect valid mapper type");
22351 Diag(TyLoc, diag::err_omp_mapper_wrong_type);
22367 llvm::DenseMap<QualType, SourceLocation> PreviousRedeclTypes;
22369 bool InCompoundScope =
true;
22370 if (S !=
nullptr) {
22379 llvm::DenseMap<OMPDeclareMapperDecl *, bool> UsedAsPrevious;
22381 while (
Filter.hasNext()) {
22382 auto *PrevDecl = cast<OMPDeclareMapperDecl>(
Filter.next());
22383 if (InCompoundScope) {
22384 auto I = UsedAsPrevious.find(PrevDecl);
22385 if (I == UsedAsPrevious.end())
22386 UsedAsPrevious[PrevDecl] =
false;
22388 UsedAsPrevious[D] =
true;
22390 PreviousRedeclTypes[PrevDecl->getType().getCanonicalType()] =
22391 PrevDecl->getLocation();
22394 if (InCompoundScope) {
22395 for (
const auto &PrevData : UsedAsPrevious) {
22396 if (!PrevData.second) {
22397 PrevDMD = PrevData.first;
22402 }
else if (PrevDeclInScope) {
22403 auto *PrevDMDInScope = PrevDMD =
22404 cast<OMPDeclareMapperDecl>(PrevDeclInScope);
22406 PreviousRedeclTypes[PrevDMDInScope->getType().getCanonicalType()] =
22407 PrevDMDInScope->getLocation();
22408 PrevDMDInScope = PrevDMDInScope->getPrevDeclInScope();
22409 }
while (PrevDMDInScope !=
nullptr);
22412 bool Invalid =
false;
22413 if (I != PreviousRedeclTypes.end()) {
22414 Diag(StartLoc, diag::err_omp_declare_mapper_redefinition)
22415 << MapperType << Name;
22416 Diag(I->second, diag::note_previous_definition);
22427 ClausesWithImplicit, PrevDMD);
22432 DMD->setAccess(AS);
22434 DMD->setInvalidDecl();
22436 auto *VD = cast<DeclRefExpr>(MapperVarRef)->getDecl();
22440 DMD->setMapperVarRef(MapperVarRef);
22457 DSAStack->addDeclareMapperVarRef(E);
22462 if (
DSAStack->getDeclareMapperVarRef())
22467 assert(
LangOpts.OpenMP &&
"Expected OpenMP mode.");
22469 if (
const auto *DRE = cast_or_null<DeclRefExpr>(Ref)) {
22482 assert(
LangOpts.OpenMP &&
"Expected OpenMP mode.");
22483 return cast<DeclRefExpr>(
DSAStack->getDeclareMapperVarRef())->getDecl();
22490 Expr *ValExpr = NumTeams;
22491 Stmt *HelperValStmt =
nullptr;
22504 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
22505 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
22510 StartLoc, LParenLoc, EndLoc);
22517 Expr *ValExpr = ThreadLimit;
22518 Stmt *HelperValStmt =
nullptr;
22528 DKind, OMPC_thread_limit,
LangOpts.OpenMP);
22531 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
22532 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
22537 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
22545 Stmt *HelperValStmt =
nullptr;
22551 ValExpr, *
this, OMPC_priority,
22553 DSAStack->getCurrentDirective(), &CaptureRegion, &HelperValStmt))
22557 StartLoc, LParenLoc, EndLoc);
22565 "Unexpected grainsize modifier in OpenMP < 51.");
22570 Diag(ModifierLoc, diag::err_omp_unexpected_clause_value)
22571 << Values << getOpenMPClauseName(OMPC_grainsize);
22575 Expr *ValExpr = Grainsize;
22576 Stmt *HelperValStmt =
nullptr;
22586 &CaptureRegion, &HelperValStmt))
22591 StartLoc, LParenLoc, ModifierLoc, EndLoc);
22599 "Unexpected num_tasks modifier in OpenMP < 51.");
22604 Diag(ModifierLoc, diag::err_omp_unexpected_clause_value)
22605 << Values << getOpenMPClauseName(OMPC_num_tasks);
22609 Expr *ValExpr = NumTasks;
22610 Stmt *HelperValStmt =
nullptr;
22617 ValExpr, *
this, OMPC_num_tasks,
22619 DSAStack->getCurrentDirective(), &CaptureRegion, &HelperValStmt))
22624 StartLoc, LParenLoc, ModifierLoc, EndLoc);
22634 VerifyPositiveIntegerConstantInClause(Hint, OMPC_hint,
false);
22643 DSAStackTy *Stack) {
22644 QualType OMPEventHandleT = Stack->getOMPEventHandleT();
22645 if (!OMPEventHandleT.
isNull())
22650 S.
Diag(Loc, diag::err_omp_implied_type_not_found) <<
"omp_event_handle_t";
22653 Stack->setOMPEventHandleT(PT.
get());
22673 auto *VD = dyn_cast_or_null<VarDecl>(Ref->
getDecl());
22683 <<
"omp_event_handle_t" << 1 << VD->
getType()
22690 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(VD,
false);
22691 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_firstprivate &&
22694 << getOpenMPClauseName(DVar.CKind)
22695 << getOpenMPClauseName(OMPC_firstprivate);
22713 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
22714 << Values << getOpenMPClauseName(OMPC_dist_schedule);
22717 Expr *ValExpr = ChunkSize;
22718 Stmt *HelperValStmt =
nullptr;
22729 ValExpr = Val.
get();
22734 if (std::optional<llvm::APSInt> Result =
22736 if (Result->isSigned() && !Result->isStrictlyPositive()) {
22737 Diag(ChunkSizeLoc, diag::err_omp_negative_expression_in_clause)
22742 DSAStack->getCurrentDirective(), OMPC_dist_schedule,
22743 LangOpts.OpenMP) != OMPD_unknown &&
22746 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
22747 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
22755 Kind, ValExpr, HelperValStmt);
22763 if (M != OMPC_DEFAULTMAP_MODIFIER_tofrom ||
22764 Kind != OMPC_DEFAULTMAP_scalar) {
22768 if (M != OMPC_DEFAULTMAP_MODIFIER_tofrom) {
22770 OMPC_DEFAULTMAP_MODIFIER_tofrom);
22774 OMPC_DEFAULTMAP_scalar);
22778 Diag(Loc, diag::err_omp_unexpected_clause_value)
22779 <<
Value << getOpenMPClauseName(OMPC_defaultmap);
22786 if (!isDefaultmapKind || !isDefaultmapModifier) {
22787 StringRef KindValue =
"'scalar', 'aggregate', 'pointer'";
22789 StringRef ModifierValue =
"'alloc', 'from', 'to', 'tofrom', "
22790 "'firstprivate', 'none', 'default'";
22791 if (!isDefaultmapKind && isDefaultmapModifier) {
22792 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
22793 << KindValue << getOpenMPClauseName(OMPC_defaultmap);
22794 }
else if (isDefaultmapKind && !isDefaultmapModifier) {
22795 Diag(MLoc, diag::err_omp_unexpected_clause_value)
22796 << ModifierValue << getOpenMPClauseName(OMPC_defaultmap);
22798 Diag(MLoc, diag::err_omp_unexpected_clause_value)
22799 << ModifierValue << getOpenMPClauseName(OMPC_defaultmap);
22800 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
22801 << KindValue << getOpenMPClauseName(OMPC_defaultmap);
22804 StringRef ModifierValue =
22805 "'alloc', 'from', 'to', 'tofrom', "
22806 "'firstprivate', 'none', 'default', 'present'";
22807 if (!isDefaultmapKind && isDefaultmapModifier) {
22808 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
22809 << KindValue << getOpenMPClauseName(OMPC_defaultmap);
22810 }
else if (isDefaultmapKind && !isDefaultmapModifier) {
22811 Diag(MLoc, diag::err_omp_unexpected_clause_value)
22812 << ModifierValue << getOpenMPClauseName(OMPC_defaultmap);
22814 Diag(MLoc, diag::err_omp_unexpected_clause_value)
22815 << ModifierValue << getOpenMPClauseName(OMPC_defaultmap);
22816 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
22817 << KindValue << getOpenMPClauseName(OMPC_defaultmap);
22827 Diag(StartLoc, diag::err_omp_one_defaultmap_each_category);
22833 DSAStack->setDefaultDMAAttr(M, OMPC_DEFAULTMAP_aggregate, StartLoc);
22834 DSAStack->setDefaultDMAAttr(M, OMPC_DEFAULTMAP_scalar, StartLoc);
22835 DSAStack->setDefaultDMAAttr(M, OMPC_DEFAULTMAP_pointer, StartLoc);
22845 DeclareTargetContextInfo &DTCI) {
22850 !isa<CXXRecordDecl>(CurLexicalContext) &&
22851 !isa<ClassTemplateDecl>(CurLexicalContext) &&
22852 !isa<ClassTemplatePartialSpecializationDecl>(CurLexicalContext) &&
22853 !isa<ClassTemplateSpecializationDecl>(CurLexicalContext)) {
22854 Diag(DTCI.Loc, diag::err_omp_region_not_file_context);
22857 DeclareTargetNesting.push_back(DTCI);
22861 const Sema::DeclareTargetContextInfo
22863 assert(!DeclareTargetNesting.empty() &&
22864 "check isInOpenMPDeclareTargetContext() first!");
22865 return DeclareTargetNesting.pop_back_val();
22869 DeclareTargetContextInfo &DTCI) {
22870 for (
auto &It : DTCI.ExplicitlyMapped)
22875 if (DeclareTargetNesting.empty())
22877 DeclareTargetContextInfo &DTCI = DeclareTargetNesting.back();
22878 Diag(DTCI.Loc, diag::warn_omp_unterminated_declare_target)
22879 << getOpenMPDirectiveName(DTCI.Kind);
22893 VarOrFuncDeclFilterCCC CCC(*
this);
22903 Diag(
Id.getLoc(), diag::err_undeclared_var_use) <<
Id.getName();
22908 if (!isa<VarDecl>(ND) && !isa<FunctionDecl>(ND) &&
22909 !isa<FunctionTemplateDecl>(ND)) {
22910 Diag(
Id.getLoc(), diag::err_omp_invalid_target_decl) <<
Id.getName();
22917 OMPDeclareTargetDeclAttr::MapTypeTy MT,
22918 DeclareTargetContextInfo &DTCI) {
22919 assert((isa<VarDecl>(ND) || isa<FunctionDecl>(ND) ||
22920 isa<FunctionTemplateDecl>(ND)) &&
22921 "Expected variable, function or function template.");
22927 Diag(Loc, diag::warn_omp_declare_target_after_first_use);
22930 const unsigned Level = -1;
22932 auto *VD = cast<ValueDecl>(ND);
22933 std::optional<OMPDeclareTargetDeclAttr *> ActiveAttr =
22934 OMPDeclareTargetDeclAttr::getActiveAttr(VD);
22935 if (ActiveAttr && (*ActiveAttr)->getDevType() != DTCI.DT &&
22936 (*ActiveAttr)->getLevel() ==
Level) {
22937 Diag(Loc, diag::err_omp_device_type_mismatch)
22938 << OMPDeclareTargetDeclAttr::ConvertDevTypeTyToStr(DTCI.DT)
22939 << OMPDeclareTargetDeclAttr::ConvertDevTypeTyToStr(
22940 (*ActiveAttr)->getDevType());
22943 if (ActiveAttr && (*ActiveAttr)->getMapType() != MT &&
22944 (*ActiveAttr)->getLevel() ==
Level) {
22945 Diag(Loc, diag::err_omp_declare_target_to_and_link) << ND;
22949 if (ActiveAttr && (*ActiveAttr)->getLevel() ==
Level)
22952 Expr *IndirectE =
nullptr;
22953 bool IsIndirect =
false;
22954 if (DTCI.Indirect) {
22955 IndirectE = *DTCI.Indirect;
22959 auto *A = OMPDeclareTargetDeclAttr::CreateImplicit(
22964 ML->DeclarationMarkedOpenMPDeclareTarget(ND, A);
22970 if (!D || !isa<VarDecl>(D))
22972 auto *VD = cast<VarDecl>(D);
22973 std::optional<OMPDeclareTargetDeclAttr::MapTypeTy>
MapTy =
22974 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
22975 if (SemaRef.
LangOpts.OpenMP >= 50 &&
22978 VD->hasGlobalStorage()) {
22979 if (!
MapTy || (*
MapTy != OMPDeclareTargetDeclAttr::MT_To &&
22980 *
MapTy != OMPDeclareTargetDeclAttr::MT_Enter)) {
22987 diag::err_omp_lambda_capture_in_declare_target_not_to);
22988 SemaRef.
Diag(SL, diag::note_var_explicitly_captured_here)
22995 SemaRef.
Diag(VD->
getLocation(), diag::warn_omp_not_in_target_context);
22996 SemaRef.
Diag(SL, diag::note_used_here) << SR;
23000 Sema &SemaRef, DSAStackTy *Stack,
23002 return OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD) ||
23013 if (
auto *VD = dyn_cast<VarDecl>(D)) {
23015 if (!VD->isFileVarDecl() && !VD->isStaticLocal() &&
23016 !VD->isStaticDataMember())
23020 if (
DSAStack->isThreadPrivate(VD)) {
23021 Diag(SL, diag::err_omp_threadprivate_in_target);
23026 if (
const auto *FTD = dyn_cast<FunctionTemplateDecl>(D))
23027 D = FTD->getTemplatedDecl();
23028 if (
auto *FD = dyn_cast<FunctionDecl>(D)) {
23029 std::optional<OMPDeclareTargetDeclAttr::MapTypeTy> Res =
23030 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(FD);
23031 if (IdLoc.
isValid() && Res && *Res == OMPDeclareTargetDeclAttr::MT_Link) {
23032 Diag(IdLoc, diag::err_omp_function_in_link_clause);
23037 if (
auto *VD = dyn_cast<ValueDecl>(D)) {
23045 if (isa<VarDecl>(D) || isa<FunctionDecl>(D) ||
23046 isa<FunctionTemplateDecl>(D)) {
23047 std::optional<OMPDeclareTargetDeclAttr *> ActiveAttr =
23048 OMPDeclareTargetDeclAttr::getActiveAttr(VD);
23049 unsigned Level = DeclareTargetNesting.size();
23050 if (ActiveAttr && (*ActiveAttr)->getLevel() >=
Level)
23052 DeclareTargetContextInfo &DTCI = DeclareTargetNesting.back();
23053 Expr *IndirectE =
nullptr;
23054 bool IsIndirect =
false;
23055 if (DTCI.Indirect) {
23056 IndirectE = *DTCI.Indirect;
23060 auto *A = OMPDeclareTargetDeclAttr::CreateImplicit(
23062 getLangOpts().OpenMP >= 52 ? OMPDeclareTargetDeclAttr::MT_Enter
23063 : OMPDeclareTargetDeclAttr::MT_To,
23064 DTCI.DT, IndirectE, IsIndirect,
Level,
23068 ML->DeclarationMarkedOpenMPDeclareTarget(D, A);
23089 unsigned Count = 0;
23090 for (
unsigned I = 0, E = MotionModifiers.size(); I < E; ++I) {
23092 llvm::is_contained(Modifiers, MotionModifiers[I])) {
23093 Diag(MotionModifiersLoc[I], diag::err_omp_duplicate_motion_modifier);
23097 "Modifiers exceed the allowed number of motion modifiers");
23098 Modifiers[Count] = MotionModifiers[I];
23099 ModifiersLoc[Count] = MotionModifiersLoc[I];
23103 MappableVarListInfo MVLI(VarList);
23105 MapperIdScopeSpec, MapperId, UnresolvedMappers);
23106 if (MVLI.ProcessedVarList.empty())
23110 Context, Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
23111 MVLI.VarComponents, MVLI.UDMapperList, Modifiers, ModifiersLoc,
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte. Structurally identical to the preceding motion-clause
// fragment — presumably the counterpart handler of the 'to'/'from' pair;
// confirm against the full source.
23126 unsigned Count = 0;
23127 for (
unsigned I = 0, E = MotionModifiers.size(); I < E; ++I) {
// Reject duplicate motion modifiers.
23129 llvm::is_contained(Modifiers, MotionModifiers[I])) {
23130 Diag(MotionModifiersLoc[I], diag::err_omp_duplicate_motion_modifier);
23134 "Modifiers exceed the allowed number of motion modifiers");
23135 Modifiers[Count] = MotionModifiers[I];
23136 ModifiersLoc[Count] = MotionModifiersLoc[I];
23140 MappableVarListInfo MVLI(VarList);
23142 MapperIdScopeSpec, MapperId, UnresolvedMappers);
23143 if (MVLI.ProcessedVarList.empty())
23147 Context, Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
23148 MVLI.VarComponents, MVLI.UDMapperList, Modifiers, ModifiersLoc,
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Handler for the OpenMP 'use_device_ptr' clause (named in the assert below):
// per variable it checks for pointer type, creates a private copy plus an
// initializer, records firstprivate DSA, and collects mappable components.
23154 MappableVarListInfo MVLI(VarList);
23158 for (
Expr *RefExpr : VarList) {
23159 assert(RefExpr &&
"NULL expr in OpenMP use_device_ptr clause.");
23162 Expr *SimpleRefExpr = RefExpr;
// Dependent/unresolved case (guard elided): keep the expression as-is with
// null private copy and init placeholders.
23166 MVLI.ProcessedVarList.push_back(RefExpr);
23167 PrivateCopies.push_back(
nullptr);
23168 Inits.push_back(
nullptr);
23175 Type =
Type.getNonReferenceType().getUnqualifiedType();
23177 auto *VD = dyn_cast<VarDecl>(D);
// use_device_ptr requires a variable of pointer type.
23181 Diag(ELoc, diag::err_omp_usedeviceptr_not_a_pointer)
23182 << 0 << RefExpr->getSourceRange();
23190 VD ? cast<DeclRefExpr>(SimpleRefExpr) :
nullptr);
23191 if (VDPrivate->isInvalidDecl())
// Build reference expressions for the private copy and its initializer
// (helper call names elided in this fragment).
23196 *
this, VDPrivate, RefExpr->getType().getUnqualifiedType(), ELoc);
23202 *
this, VDInit, RefExpr->
getType(), RefExpr->getExprLoc());
23212 MVLI.ProcessedVarList.push_back(VD ? RefExpr->IgnoreParens() : Ref);
23213 PrivateCopies.push_back(VDPrivateRefExpr);
23214 Inits.push_back(VDInitRefExpr);
// The device pointer is treated as firstprivate inside the region.
23219 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_firstprivate, Ref);
23223 MVLI.VarBaseDeclarations.push_back(D);
23224 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
23225 MVLI.VarComponents.back().emplace_back(SimpleRefExpr, D,
23229 if (MVLI.ProcessedVarList.empty())
23233 Context, Locs, MVLI.ProcessedVarList, PrivateCopies, Inits,
23234 MVLI.VarBaseDeclarations, MVLI.VarComponents);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Handler for the OpenMP 'use_device_addr' clause (named in the assert):
// records firstprivate DSA per variable and collects mappable components.
// Unlike use_device_ptr above, no private copy/initializer is built here.
23239 MappableVarListInfo MVLI(VarList);
23241 for (
Expr *RefExpr : VarList) {
23242 assert(RefExpr &&
"NULL expr in OpenMP use_device_addr clause.");
23245 Expr *SimpleRefExpr = RefExpr;
// Dependent/unresolved case (guard elided): keep expression unchanged.
23250 MVLI.ProcessedVarList.push_back(RefExpr);
23255 auto *VD = dyn_cast<VarDecl>(D);
23262 MVLI.ProcessedVarList.push_back(VD ? RefExpr->IgnoreParens() : Ref);
23267 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_firstprivate, Ref);
23271 MVLI.VarBaseDeclarations.push_back(D);
23272 MVLI.VarComponents.emplace_back();
// For array sections/subscripts the component expression is adjusted
// (assignment elided); otherwise the plain reference is used.
23273 Expr *Component = SimpleRefExpr;
23274 if (VD && (isa<OMPArraySectionExpr>(RefExpr->IgnoreParenImpCasts()) ||
23275 isa<ArraySubscriptExpr>(RefExpr->IgnoreParenImpCasts())))
23277 MVLI.VarComponents.back().emplace_back(Component, D,
23281 if (MVLI.ProcessedVarList.empty())
23285 MVLI.VarBaseDeclarations,
23286 MVLI.VarComponents);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Handler for the OpenMP 'is_device_ptr' clause (named in the assert and the
// OMPC_is_device_ptr uses): checks argument type, rejects conflicting
// data-sharing attributes and conflicting map components, then records the
// expression's mappable components.
23291 MappableVarListInfo MVLI(VarList);
23292 for (
Expr *RefExpr : VarList) {
23293 assert(RefExpr &&
"NULL expr in OpenMP is_device_ptr clause.");
23296 Expr *SimpleRefExpr = RefExpr;
// Dependent/unresolved case (guard elided): keep expression unchanged.
23300 MVLI.ProcessedVarList.push_back(RefExpr);
// is_device_ptr argument must have an acceptable (pointer-like) type.
23310 Diag(ELoc, diag::err_omp_argument_type_isdeviceptr)
23311 << 0 << RefExpr->getSourceRange();
// A variable with an explicit DSA cannot also appear in is_device_ptr.
23317 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
23319 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
23320 << getOpenMPClauseName(DVar.CKind)
23321 << getOpenMPClauseName(OMPC_is_device_ptr)
23322 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
// Detect a conflicting earlier mappable-expression component for this decl.
23327 const Expr *ConflictExpr;
23328 if (
DSAStack->checkMappableExprComponentListsForDecl(
23333 ConflictExpr = R.front().getAssociatedExpression();
23336 Diag(ELoc, diag::err_omp_map_shared_storage) << RefExpr->getSourceRange();
23345 SimpleRefExpr, D,
false);
23346 DSAStack->addMappableExpressionComponents(
23347 D, MC, OMPC_is_device_ptr);
23350 MVLI.ProcessedVarList.push_back(SimpleRefExpr);
// Only plain decl references or this-based member accesses are expected.
23355 assert((isa<DeclRefExpr>(SimpleRefExpr) ||
23356 isa<CXXThisExpr>(cast<MemberExpr>(SimpleRefExpr)->
getBase())) &&
23357 "Unexpected device pointer expression!");
23358 MVLI.VarBaseDeclarations.push_back(
23359 isa<DeclRefExpr>(SimpleRefExpr) ? D :
nullptr);
23360 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
23361 MVLI.VarComponents.back().push_back(MC);
23364 if (MVLI.ProcessedVarList.empty())
23368 MVLI.VarBaseDeclarations,
23369 MVLI.VarComponents);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Handler for the OpenMP 'has_device_addr' clause (named in the asserts):
// mirrors the is_device_ptr checks (DSA conflict, shared-storage conflict)
// but additionally captures the variable and stores the captured reference.
23374 MappableVarListInfo MVLI(VarList);
23375 for (
Expr *RefExpr : VarList) {
23376 assert(RefExpr &&
"NULL expr in OpenMP has_device_addr clause.");
23379 Expr *SimpleRefExpr = RefExpr;
// Dependent/unresolved case (guard elided): keep expression unchanged.
23384 MVLI.ProcessedVarList.push_back(RefExpr);
// A variable with an explicit DSA cannot also appear in has_device_addr.
23392 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
23394 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
23395 << getOpenMPClauseName(DVar.CKind)
23396 << getOpenMPClauseName(OMPC_has_device_addr)
23397 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
// Detect a conflicting earlier mappable-expression component for this decl.
23402 const Expr *ConflictExpr;
23403 if (
DSAStack->checkMappableExprComponentListsForDecl(
23408 ConflictExpr = R.front().getAssociatedExpression();
23411 Diag(ELoc, diag::err_omp_map_shared_storage) << RefExpr->getSourceRange();
// Array sections/subscripts get an adjusted component (assignment elided).
23419 Expr *Component = SimpleRefExpr;
23420 auto *VD = dyn_cast<VarDecl>(D);
23421 if (VD && (isa<OMPArraySectionExpr>(RefExpr->IgnoreParenImpCasts()) ||
23422 isa<ArraySubscriptExpr>(RefExpr->IgnoreParenImpCasts())))
23425 Component, D,
false);
23426 DSAStack->addMappableExpressionComponents(
23427 D, MC, OMPC_has_device_addr);
// The captured reference (Ref, construction elided) must be valid here.
23433 assert(Ref &&
"has_device_addr capture failed");
23434 MVLI.ProcessedVarList.push_back(Ref);
23436 MVLI.ProcessedVarList.push_back(RefExpr->IgnoreParens());
// Only plain decl references or this-based member accesses are expected.
23441 assert((isa<DeclRefExpr>(SimpleRefExpr) ||
23442 isa<CXXThisExpr>(cast<MemberExpr>(SimpleRefExpr)->
getBase())) &&
23443 "Unexpected device pointer expression!");
23444 MVLI.VarBaseDeclarations.push_back(
23445 isa<DeclRefExpr>(SimpleRefExpr) ? D :
nullptr);
23446 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
23447 MVLI.VarComponents.back().push_back(MC);
23450 if (MVLI.ProcessedVarList.empty())
23454 MVLI.VarBaseDeclarations,
23455 MVLI.VarComponents);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Fragment of an allocate-clause handler: validates/converts the allocator
// expression against omp_allocator_handle_t, then processes the variable
// list. (The assert below says "private clause" — likely a copy-pasted
// message in the original; verify against the full source.)
23471 DSAStack->getOMPAllocatorHandleT(),
23476 Allocator = AllocatorRes.
get();
// Missing/invalid allocator expression diagnostic.
23485 targetDiag(StartLoc, diag::err_expected_allocator_expression);
23489 for (
Expr *RefExpr : VarList) {
23490 assert(RefExpr &&
"NULL expr in OpenMP private clause.");
23493 Expr *SimpleRefExpr = RefExpr;
23497 Vars.push_back(RefExpr);
23503 auto *VD = dyn_cast<VarDecl>(D);
23508 ? RefExpr->IgnoreParens()
// Remember the allocator for the innermost region.
23516 DSAStack->addInnerAllocatorExpr(Allocator);
23518 ColonLoc, EndLoc, Vars);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Handler for the OpenMP 'nontemporal' clause: each variable may appear at
// most once; a repeat is diagnosed with a note pointing at the previous use.
23526 for (
Expr *RefExpr : VarList) {
23527 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
23530 Expr *SimpleRefExpr = RefExpr;
// Dependent/unresolved case (guard elided): keep expression unchanged.
23534 Vars.push_back(RefExpr);
// addUniqueNontemporal returns the earlier reference if D was already
// listed; non-null therefore means a duplicate.
23541 if (
const Expr *PrevRef =
23542 DSAStack->addUniqueNontemporal(D, SimpleRefExpr)) {
23543 Diag(ELoc, diag::err_omp_used_in_clause_twice)
23544 << 0 << getOpenMPClauseName(OMPC_nontemporal) << ERange;
23545 Diag(PrevRef->getExprLoc(), diag::note_omp_explicit_dsa)
23546 << getOpenMPClauseName(OMPC_nontemporal);
23550 Vars.push_back(RefExpr);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Fragment of an inclusive/exclusive ('scan') clause handler: each listed
// variable must carry an inscan reduction in the enclosing context. (The
// assert message says "nontemporal" — looks copy-pasted in the original.)
23565 for (
Expr *RefExpr : VarList) {
23566 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
23569 Expr *SimpleRefExpr = RefExpr;
// Dependent/unresolved case (guard elided): keep expression unchanged.
23574 Vars.push_back(RefExpr);
23579 const DSAStackTy::DSAVarData DVar =
// Variable must be a reduction with the 'inscan' modifier.
23585 if (DVar.CKind != OMPC_reduction || DVar.Modifier != OMPC_REDUCTION_inscan)
23586 Diag(ELoc, diag::err_omp_inclusive_exclusive_not_reduction)
23587 << RefExpr->getSourceRange();
// Mark the decl as used in a scan directive when there is a parent region.
23589 if (
DSAStack->getParentDirective() != OMPD_unknown)
23590 DSAStack->markDeclAsUsedInScanDirective(D);
23591 Vars.push_back(RefExpr);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Counterpart of the previous fragment: validates the other of the
// inclusive/exclusive clause pair. Here the DSA lookup consults the parent
// directive (getTopDSA with second argument true) rather than the current one.
// (Assert message again says "nontemporal" — presumably copy-pasted.)
23605 for (
Expr *RefExpr : VarList) {
23606 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
23609 Expr *SimpleRefExpr = RefExpr;
// Dependent/unresolved case (guard elided): keep expression unchanged.
23614 Vars.push_back(RefExpr);
23620 DSAStackTy::DSAVarData DVar;
23621 if (ParentDirective != OMPD_unknown)
23622 DVar =
DSAStack->getTopDSA(D,
true);
// Variable must be an inscan reduction in the parent region.
23627 if (ParentDirective == OMPD_unknown || DVar.CKind != OMPC_reduction ||
23628 DVar.Modifier != OMPC_REDUCTION_inscan) {
23629 Diag(ELoc, diag::err_omp_inclusive_exclusive_not_reduction)
23630 << RefExpr->getSourceRange();
23632 DSAStack->markDeclAsUsedInScanDirective(D);
23634 Vars.push_back(RefExpr);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Helper fragment that lazily resolves and caches the omp_alloctrait_t type
// on the DSA stack; emits err_omp_implied_type_not_found if the type is not
// declared in the translation unit.
23645 QualType OMPAlloctraitT = Stack->getOMPAlloctraitT();
// Already resolved — nothing to do (early-return body elided).
23646 if (!OMPAlloctraitT.
isNull())
23651 S.
Diag(Loc, diag::err_omp_implied_type_not_found) <<
"omp_alloctrait_t";
// Cache the looked-up type (PT lookup elided from this fragment).
23654 Stack->setOMPAlloctraitT(PT.
get());
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Fragment of a 'uses_allocators'-style clause handler: enumerates the
// predefined allocator kinds, classifies each allocator expression as
// predefined vs user-defined, checks type compatibility with
// omp_allocator_handle_t, and validates any allocator-traits expression
// (must be a constant array of omp_alloctrait_t).
23673 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
23674 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
23675 StringRef Allocator =
23676 OMPAllocateDeclAttr::ConvertAllocatorTypeTyToStr(AllocatorKind);
23684 Expr *AllocatorExpr =
nullptr;
// Predefined allocators are spelled as plain decl references.
23692 auto *DRE = dyn_cast<DeclRefExpr>(AllocatorExpr);
23693 bool IsPredefinedAllocator =
false;
23695 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorTy =
23697 IsPredefinedAllocator =
23699 OMPAllocateDeclAttr::AllocatorTypeTy::OMPUserDefinedMemAlloc;
// Predefined allocators are type-compatible by construction; otherwise the
// expression type must match omp_allocator_handle_t (comparison elided).
23703 bool IsTypeCompatible = IsPredefinedAllocator;
23704 IsTypeCompatible = IsTypeCompatible ||
23706 OMPAllocatorHandleT);
23708 IsTypeCompatible ||
23710 bool IsNonConstantLValue =
// A user-defined allocator must be a non-constant lvalue decl reference of
// a compatible type.
23712 if (!DRE || !IsTypeCompatible ||
23713 (!IsPredefinedAllocator && !IsNonConstantLValue)) {
23715 <<
"omp_allocator_handle_t" << (DRE ? 1 : 0)
// Predefined allocators may not carry a traits expression ...
23724 diag::err_omp_predefined_allocator_with_traits)
23727 << cast<NamedDecl>(DRE->
getDecl())->getName()
// ... while user-defined allocators require one.
23736 diag::err_omp_nonpredefined_allocator_without_traits);
23744 IsPredefinedAllocator
23745 ? DSAStackTy::UsesAllocatorsDeclKind::PredefinedAllocator
23746 : DSAStackTy::UsesAllocatorsDeclKind::UserDefinedAllocator);
23748 Expr *AllocatorTraitsExpr =
nullptr;
// Traits must be a constant array; its element type is checked against
// omp_alloctrait_t (comparison elided).
23762 if (
const auto *ConstArrayTy = dyn_cast<ConstantArrayType>(Ty))
23763 TraitTy = ConstArrayTy->getElementType();
23770 diag::err_omp_expected_array_alloctraits)
23771 << AllocatorTraitsExpr->
getType();
// Record the traits decl so later references are classified correctly.
23776 if (
auto *DRE = dyn_cast<DeclRefExpr>(AllocatorTraitsExpr))
23779 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Fragment of a locator-list clause handler (iterates 'Locators', accepts
// array sections/shaping expressions — consistent with the 'affinity'
// clause; confirm against the full source). Each locator must be an
// addressable lvalue or array item. (The assert message says "shared
// clause" — likely copy-pasted in the original.)
23796 for (
Expr *RefExpr : Locators) {
23797 assert(RefExpr &&
"NULL expr in OpenMP shared clause.");
// Dependent expressions are deferred until instantiation.
23798 if (isa<DependentScopeDeclRefExpr>(RefExpr) || RefExpr->isTypeDependent()) {
23800 Vars.push_back(RefExpr);
23808 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
23809 << 1 << 0 << RefExpr->getSourceRange();
// Array sections and array-shaping expressions are accepted even when the
// default-lvalue conversion result is unusable.
23818 if (!Res.
isUsable() && !isa<OMPArraySectionExpr>(SimpleExpr) &&
23819 !isa<OMPArrayShapingExpr>(SimpleExpr)) {
23820 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
23821 << 1 << 0 << RefExpr->getSourceRange();
23824 Vars.push_back(SimpleExpr);
23828 EndLoc, Modifier, Vars);
// NOTE(review): extraction fragment; code preserved byte-for-byte.
// Diagnostic for an unexpected argument value on the 'bind' clause.
23837 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
23840 << getOpenMPClauseName(OMPC_bind);
// NOTE(review): extraction fragment with elided lines; code preserved
// byte-for-byte.
// Handler fragment for the 'ompx_dyn_cgroup_mem' clause: wraps the size
// expression, determines the capture region for the current directive, and
// captures any referenced variables before building the clause node.
23852 Expr *ValExpr = Size;
23853 Stmt *HelperValStmt =
nullptr;
23864 DKind, OMPC_ompx_dyn_cgroup_mem,
LangOpts.OpenMP);
// Capture referenced variables so the value is evaluable in the region.
23867 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
23868 ValExpr = tryBuildCapture(*
this, ValExpr, Captures).get();
23873 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);